Dec 05 17:04:20 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 17:04:20 crc restorecon[4669]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 17:04:20 crc restorecon[4669]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:20 crc restorecon[4669]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc 
restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:04:20 crc restorecon[4669]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:04:20 crc restorecon[4669]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:04:20 crc 
restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:20 crc restorecon[4669]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:20 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 
17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:04:21 crc 
restorecon[4669]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:04:21 crc restorecon[4669]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Dec 05 17:04:21 crc kubenswrapper[4753]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 17:04:21 crc kubenswrapper[4753]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 05 17:04:21 crc kubenswrapper[4753]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 17:04:21 crc kubenswrapper[4753]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 17:04:21 crc kubenswrapper[4753]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 05 17:04:21 crc kubenswrapper[4753]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.596546 4753 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598942 4753 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598957 4753 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598962 4753 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598967 4753 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598971 4753 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598976 4753 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598979 4753 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598983 4753 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598987 4753 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598990 4753 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598994 4753 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.598998 4753 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599002 4753 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599006 4753 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599009 4753 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599013 4753 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599016 4753 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599020 4753 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599023 4753 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599027 4753 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599038 4753 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599043 4753 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599046 4753 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599050 4753 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599054 4753 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599057 4753 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599061 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599065 4753 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599070 4753 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599074 4753 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599078 4753 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599082 4753 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599086 4753 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599091 4753 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599095 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599100 4753 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599103 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599107 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599111 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599115 4753 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599118 4753 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599122 4753 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599126 4753 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599130 4753 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599134 4753 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599137 4753 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599141 4753 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599156 4753 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599160 4753 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599163 4753 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599166 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599170 4753 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599173 4753 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599177 4753 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599180 4753 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599184 4753 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599189 4753 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599193 4753 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599196 4753 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599200 4753 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599204 4753 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599208 4753 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599213 4753 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599217 4753 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599220 4753 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599223 4753 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599227 4753 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599231 4753 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599235 4753 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599238 4753 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.599242 4753 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599443 4753 flags.go:64] FLAG: --address="0.0.0.0"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599453 4753 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599459 4753 flags.go:64] FLAG: --anonymous-auth="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599465 4753 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599470 4753 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599475 4753 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599481 4753 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599486 4753 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599490 4753 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599495 4753 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599499 4753 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599503 4753 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599507 4753 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599512 4753 flags.go:64] FLAG: --cgroup-root=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599515 4753 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599519 4753 flags.go:64] FLAG: --client-ca-file=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599523 4753 flags.go:64] FLAG: --cloud-config=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599529 4753 flags.go:64] FLAG: --cloud-provider=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599533 4753 flags.go:64] FLAG: --cluster-dns="[]"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599539 4753 flags.go:64] FLAG: --cluster-domain=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599543 4753 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599549 4753 flags.go:64] FLAG: --config-dir=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599554 4753 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599583 4753 flags.go:64] FLAG: --container-log-max-files="5"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599591 4753 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599596 4753 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599601 4753 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599607 4753 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599612 4753 flags.go:64] FLAG: --contention-profiling="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599616 4753 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599620 4753 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599625 4753 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599629 4753 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599634 4753 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599638 4753 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599642 4753 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599646 4753 flags.go:64] FLAG: --enable-load-reader="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599651 4753 flags.go:64] FLAG: --enable-server="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599655 4753 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599660 4753 flags.go:64] FLAG: --event-burst="100"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599665 4753 flags.go:64] FLAG: --event-qps="50"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599669 4753 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599673 4753 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599678 4753 flags.go:64] FLAG: --eviction-hard=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599685 4753 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599690 4753 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599695 4753 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599700 4753 flags.go:64] FLAG: --eviction-soft=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599705 4753 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599710 4753 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599715 4753 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599720 4753 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599725 4753 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599730 4753 flags.go:64] FLAG: --fail-swap-on="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599734 4753 flags.go:64] FLAG: --feature-gates=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599740 4753 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599744 4753 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599750 4753 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599755 4753 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599759 4753 flags.go:64] FLAG: --healthz-port="10248"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599764 4753 flags.go:64] FLAG: --help="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599768 4753 flags.go:64] FLAG: --hostname-override=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599772 4753 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599776 4753 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599780 4753 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599784 4753 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599788 4753 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599793 4753 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599797 4753 flags.go:64] FLAG: --image-service-endpoint=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599801 4753 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599805 4753 flags.go:64] FLAG: --kube-api-burst="100"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599810 4753 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599815 4753 flags.go:64] FLAG: --kube-api-qps="50"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599819 4753 flags.go:64] FLAG: --kube-reserved=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599823 4753 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599827 4753 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599831 4753 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599835 4753 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599839 4753 flags.go:64] FLAG: --lock-file=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599843 4753 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599848 4753 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599852 4753 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599858 4753 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599862 4753 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599866 4753 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599870 4753 flags.go:64] FLAG: --logging-format="text"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599874 4753 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599879 4753 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599883 4753 flags.go:64] FLAG: --manifest-url=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599887 4753 flags.go:64] FLAG: --manifest-url-header=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599893 4753 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599897 4753 flags.go:64] FLAG: --max-open-files="1000000"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599902 4753 flags.go:64] FLAG: --max-pods="110"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599907 4753 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599911 4753 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599915 4753 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599919 4753 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599923 4753 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599927 4753 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599931 4753 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599941 4753 flags.go:64] FLAG: --node-status-max-images="50"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599945 4753 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599949 4753 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599954 4753 flags.go:64] FLAG: --pod-cidr=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599958 4753 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599964 4753 flags.go:64] FLAG: --pod-manifest-path=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599968 4753 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599972 4753 flags.go:64] FLAG: --pods-per-core="0"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599976 4753 flags.go:64] FLAG: --port="10250"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599980 4753 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599985 4753 flags.go:64] FLAG: --provider-id=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599990 4753 flags.go:64] FLAG: --qos-reserved=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599994 4753 flags.go:64] FLAG: --read-only-port="10255"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.599998 4753 flags.go:64] FLAG: --register-node="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600002 4753 flags.go:64] FLAG: --register-schedulable="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600006 4753 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600020 4753 flags.go:64] FLAG: --registry-burst="10"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600025 4753 flags.go:64] FLAG: --registry-qps="5"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600029 4753 flags.go:64] FLAG: --reserved-cpus=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600032 4753 flags.go:64] FLAG: --reserved-memory=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600038 4753 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600042 4753 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600046 4753 flags.go:64] FLAG: --rotate-certificates="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600050 4753 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600054 4753 flags.go:64] FLAG: --runonce="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600058 4753 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600062 4753 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600066 4753 flags.go:64] FLAG: --seccomp-default="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600070 4753 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600075 4753 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600079 4753 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600084 4753 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600088 4753 flags.go:64] FLAG: --storage-driver-password="root"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600092 4753 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600096 4753 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600100 4753 flags.go:64] FLAG: --storage-driver-user="root"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600104 4753 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600108 4753 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600112 4753 flags.go:64] FLAG: --system-cgroups=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600116 4753 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600123 4753 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600130 4753 flags.go:64] FLAG: --tls-cert-file=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600135 4753 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600155 4753 flags.go:64] FLAG: --tls-min-version=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600160 4753 flags.go:64] FLAG: --tls-private-key-file=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600164 4753 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600168 4753 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600173 4753 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600177 4753 flags.go:64] FLAG: --v="2"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600185 4753 flags.go:64] FLAG: --version="false"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600191 4753 flags.go:64] FLAG: --vmodule=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600196 4753 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600201 4753 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600294 4753 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600298 4753 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600302 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600307 4753 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600311 4753 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600315 4753 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600319 4753 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600322 4753 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600326 4753 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600329 4753 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600333 4753 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600336 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600342 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600345 4753 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600349 4753 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600353 4753 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600357 4753 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600362 4753 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600366 4753 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600369 4753 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600378 4753 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600382 4753 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600385 4753 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600390 4753 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600394 4753 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600398 4753 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600402 4753 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600406 4753 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600410 4753 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600413 4753 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600417 4753 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600421 4753 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600424 4753 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600427 4753 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600431 4753 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600434 4753 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600438 4753 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600441 4753 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600445 4753 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600448 4753 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600452 4753 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600456 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600459 4753 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600462 4753 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600467 4753 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600471 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600475 4753 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600479 4753 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600484 4753 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600488 4753 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600492 4753 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600495 4753 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600501 4753 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600505 4753 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600509 4753 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600512 4753 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600515 4753 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600519 4753 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600523 4753 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600526 4753 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600529 4753 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600533 4753 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600537 4753 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600541 4753 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600545 4753 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600548 4753 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600552 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600556 4753 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600559 4753 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600563 4753 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.600566 4753 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.600571 4753 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.610739 4753 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.611218 4753 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611283 4753 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611291 4753 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611297 4753 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611302 4753 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611307 4753 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611312 4753 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611316 4753 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611320 4753 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611325 4753 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611328 4753 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611332 4753 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611336 4753 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611340 4753 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611344 4753 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611348 4753 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611352 4753 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611355 4753 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611359 4753 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611362 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611366 4753 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611369 4753 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611373 4753 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611376 4753 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611381 4753 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611385 4753 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611389 4753 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611393 4753 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611398 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611402 4753 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611406 4753 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611410 4753 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611413 4753 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611417 4753 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611421 4753 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611424 4753 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611427 4753 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611431 4753 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611435 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611439 4753 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611443 4753 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611447 4753 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611450 4753 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611454 4753 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611458 4753 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611461 4753 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611465 4753 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611468 4753 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611472 4753 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611476 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611480 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611484 4753 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611491 4753 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611496 4753 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611501 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611506 4753 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611510 4753 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611515 4753 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611519 4753 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611522 4753 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611526 4753 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611531 4753 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611536 4753 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611540 4753 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611574 4753 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611578 4753 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611582 4753 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611587 4753 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611593 4753 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611597 4753 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611602 4753 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.611606 4753 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.611613 4753 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612230 4753 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612244 4753 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612248 4753 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612253 4753 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612258 4753 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612263 4753 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612267 4753 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612271 4753 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612280 4753 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612284 4753 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612287 4753 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612291 4753 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612295 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612299 4753 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612303 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612306 4753 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612310 4753 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612316 4753 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612322 4753 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612326 4753 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612330 4753 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612337 4753 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612341 4753 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612345 4753 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612349 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612353 4753 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612357 4753 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612362 4753 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612366 4753 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612370 4753 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612374 4753 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612378 4753 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612382 4753 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612386 4753 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612392 4753 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612395 4753 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612399 4753 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612403 4753 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612408 4753 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612413 4753 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612417 4753 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612421 4753 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612425 4753 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612430 4753 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612435 4753 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612439 4753 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612446 4753 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612451 4753 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612456 4753 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612460 4753 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612464 4753 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612468 4753 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612472 4753 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612475 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612479 4753 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612484 4753 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612488 4753 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612493 4753 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612500 4753 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612505 4753 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612510 4753 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612516 4753 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612520 4753 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612524 4753 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612528 4753 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612532 4753 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612536 4753 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612539 4753 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612543 4753 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612547 4753 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.612554 4753 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.612560 4753 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.613095 4753 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.615932 4753 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.616020 4753 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.616488 4753 server.go:997] "Starting client certificate rotation"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.616517 4753 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.616682 4753 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-30 00:55:09.986417378 +0000 UTC
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.616763 4753 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 583h50m48.369657157s for next certificate rotation
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.621066 4753 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.622336 4753 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.628519 4753 log.go:25] "Validated CRI v1 runtime API"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.640559 4753 log.go:25] "Validated CRI v1 image API"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.641640 4753 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.644163 4753 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-17-00-06-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.644188 4753 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.662240 4753 manager.go:217] Machine: {Timestamp:2025-12-05 17:04:21.661024844 +0000 UTC m=+0.164131880 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:6ae126b1-60b3-4aa4-8711-3da4c1b89426 BootID:6cf3541b-b15a-4035-99a1-dd4a7b86e07c Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:98:64:f5 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:98:64:f5 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:9e:34:ce Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:33:f3:ca Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:b0:03:66 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:14:bd:e8 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:12:3c:53:98:dc:1c Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:5e:3b:25:1a:c2:c0 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.662492 4753 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.662705 4753 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.665128 4753 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.665613 4753 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.666115 4753 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.666357 4753 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.666368 4753 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.666764 4753 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.666800 4753 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
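The "Waiting 583h50m48..." line above is just the gap between the logged rotation deadline and the current time; a small Go check of that arithmetic, with the timestamps copied from the log (sub-second digits dropped):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Rotation deadline and "now" as logged by certificate_manager.go:356.
	deadline, _ := time.Parse(time.RFC3339, "2025-12-30T00:55:09Z")
	now, _ := time.Parse(time.RFC3339, "2025-12-05T17:04:21Z")
	// Prints 583h50m48s, matching the logged wait to the second.
	fmt.Println(deadline.Sub(now))
}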
socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.667073 4753 state_mem.go:36] "Initialized new in-memory state store" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.667194 4753 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.667890 4753 kubelet.go:418] "Attempting to sync node with API server" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.667920 4753 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.667953 4753 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.667968 4753 kubelet.go:324] "Adding apiserver pod source" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.667979 4753 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.669752 4753 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.670283 4753 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.670915 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.670915 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.671047 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.671034 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.671426 4753 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672382 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672430 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672451 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672470 4753 plugins.go:603] "Loaded volume plugin" 
pluginName="kubernetes.io/host-path" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672501 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672520 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672538 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672567 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672588 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672607 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672633 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.672652 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.673449 4753 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.674402 4753 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.674413 4753 server.go:1280] "Started kubelet" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.674777 4753 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.674799 4753 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.675698 4753 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Dec 05 17:04:21 crc systemd[1]: Started Kubernetes Kubelet. 
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.676441 4753 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.676475 4753 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.676555 4753 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.676578 4753 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.676589 4753 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 23:40:48.189625914 +0000 UTC
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.676653 4753 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 990h36m26.512979483s for next certificate rotation
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.676695 4753 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.676802 4753 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.677522 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused
Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.677564 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError"
Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.678350 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="200ms"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.678895 4753 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.678930 4753 factory.go:55] Registering systemd factory
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.678944 4753 factory.go:221] Registration of the systemd container factory successfully
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.679082 4753 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.677673 4753 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e608cc3e95962 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 17:04:21.674350946 +0000 UTC m=+0.177457992,LastTimestamp:2025-12-05 17:04:21.674350946 +0000 UTC m=+0.177457992,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.679699 4753 factory.go:153] Registering CRI-O factory
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.679865 4753 factory.go:221] Registration of the crio container factory successfully
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.680020 4753 factory.go:103] Registering Raw factory
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.680243 4753 manager.go:1196] Started watching for new ooms in manager
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.682062 4753 manager.go:319] Starting recovery of all containers
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695611 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695706 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695734 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695762 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695787 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695808 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695868 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695889 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
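The lease controller above fails and schedules a retry with interval="200ms". A sketch of that retry loop; the stand-in ensureLease function and the doubling backoff growth are assumptions for illustration, not something the log states:

package main

import (
	"errors"
	"fmt"
	"time"
)

// ensureLease stands in for the controller's apiserver call; here it
// always fails the way the log shows (connection refused).
func ensureLease() error { return errors.New("connect: connection refused") }

func main() {
	interval := 200 * time.Millisecond // the interval="200ms" logged above
	for attempt := 1; attempt <= 5; attempt++ {
		err := ensureLease()
		if err == nil {
			return
		}
		fmt.Printf("attempt %d failed (%v), retrying in %v\n", attempt, err, interval)
		time.Sleep(interval)
		interval *= 2 // backoff factor is an assumption
	}
}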
volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695916 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695951 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695972 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.695995 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696015 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696044 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696066 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696084 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696103 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696125 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696181 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696206 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696225 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696248 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696269 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696302 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696324 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696347 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696370 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696392 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696411 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696435 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696458 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696482 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696531 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696552 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696574 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696597 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696626 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696655 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696680 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696703 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696731 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696762 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696809 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696829 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696850 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696872 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696894 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696913 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696934 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696959 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.696981 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697003 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" 
volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697034 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697058 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697080 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697102 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697124 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697167 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697191 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697212 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697261 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697283 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697309 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697329 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697355 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697380 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697401 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697424 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697445 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697466 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697489 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697509 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697528 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697548 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697572 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697595 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697616 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697646 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697669 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697690 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.697727 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.698843 4753 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.698904 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.698930 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.698946 4753 reconstruct.go:130] "Volume 
is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.698963 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.698981 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.698995 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699008 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699022 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699039 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699055 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699074 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699087 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699097 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699111 4753 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699125 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699139 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699168 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699182 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699196 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699212 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699229 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699242 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699260 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699281 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699298 4753 reconstruct.go:130] "Volume is marked as uncertain and added into 
the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699313 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699540 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699565 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699581 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699601 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699617 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699702 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699722 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699738 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699754 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699766 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699779 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699800 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699813 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699826 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699840 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699856 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699869 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699883 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699896 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699909 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699923 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" 
volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699942 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699954 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699968 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699984 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.699996 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700010 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700024 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700041 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700058 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700070 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700082 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700094 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700107 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700118 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700131 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700160 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700173 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700186 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700199 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700212 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700224 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700238 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" 
volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700253 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700265 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700277 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700289 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700302 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700314 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700325 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700337 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700349 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700363 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700376 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" 
volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700389 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700401 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700416 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700429 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700447 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700459 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700472 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700484 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700497 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700511 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700528 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700541 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700554 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700567 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700581 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700593 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700606 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700655 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700668 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700681 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700694 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700705 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700717 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700731 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700744 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700756 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700769 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700807 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700819 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700832 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700858 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700870 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700883 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700894 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700911 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700922 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700949 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700960 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.700987 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.701001 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.701017 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.701029 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.701041 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.701052 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.701064 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.701085 4753 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.701098 4753 reconstruct.go:97] "Volume reconstruction finished" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.701105 4753 reconciler.go:26] "Reconciler: start to sync state" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.705609 4753 manager.go:324] Recovery completed Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.715853 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.715906 4753 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.718674 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.718712 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.718722 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.719156 4753 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.719191 4753 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.719221 4753 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.719272 4753 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.719623 4753 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.719641 4753 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.719662 4753 state_mem.go:36] "Initialized new in-memory state store" Dec 05 17:04:21 crc kubenswrapper[4753]: W1205 17:04:21.720390 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.720451 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.770007 4753 policy_none.go:49] "None policy: Start" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.771685 4753 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.771743 4753 state_mem.go:35] "Initializing new in-memory state store" Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.777041 4753 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.819885 4753 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.820694 4753 manager.go:334] "Starting Device Plugin manager" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.820774 4753 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.820797 4753 server.go:79] "Starting device plugin registration server" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.821631 4753 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.821665 4753 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.821956 4753 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.822051 4753 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.822061 4753 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 
17:04:21.832620 4753 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.879356 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="400ms" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.922382 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.924001 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.924088 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.924117 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:21 crc kubenswrapper[4753]: I1205 17:04:21.924223 4753 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:04:21 crc kubenswrapper[4753]: E1205 17:04:21.925019 4753 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.233:6443: connect: connection refused" node="crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.021027 4753 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.021195 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.022504 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.022540 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.022549 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.022663 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.022857 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.022887 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.023591 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.023617 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.023628 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.023732 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.024075 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.024224 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.024264 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.024281 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.024289 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.024402 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.025038 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.025068 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.025503 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.025530 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.025540 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.026008 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.026031 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.026040 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.026121 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.026503 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.026533 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.026844 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.026867 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.026877 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.027001 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.027012 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.027021 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.027602 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.027627 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.027638 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.027753 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.027774 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.028090 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.028119 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.028130 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.028453 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.028473 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.028480 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106468 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106518 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106538 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106560 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106632 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106687 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106710 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106732 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106747 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106801 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106819 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106850 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106879 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106893 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.106908 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc 
kubenswrapper[4753]: I1205 17:04:22.126130 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.127488 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.127535 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.127544 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.127568 4753 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:04:22 crc kubenswrapper[4753]: E1205 17:04:22.128016 4753 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.233:6443: connect: connection refused" node="crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.207829 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.207885 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.207908 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.207924 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.207943 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.207958 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.207973 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: 
\"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.207989 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208007 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208010 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208076 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208024 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208172 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208177 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208213 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208078 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208025 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208311 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208319 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208353 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208370 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208387 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208395 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208419 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208402 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208442 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208100 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208497 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208522 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.208210 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: E1205 17:04:22.280954 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="800ms" Dec 05 17:04:22 crc kubenswrapper[4753]: E1205 17:04:22.288331 4753 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e608cc3e95962 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 17:04:21.674350946 +0000 UTC m=+0.177457992,LastTimestamp:2025-12-05 17:04:21.674350946 +0000 UTC m=+0.177457992,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.359355 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.367776 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.383853 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: W1205 17:04:22.399135 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-8e67ebe8ebfb6b6a4603250c1af063abfeba6156ba2c18c26194291f15198b26 WatchSource:0}: Error finding container 8e67ebe8ebfb6b6a4603250c1af063abfeba6156ba2c18c26194291f15198b26: Status 404 returned error can't find the container with id 8e67ebe8ebfb6b6a4603250c1af063abfeba6156ba2c18c26194291f15198b26 Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.412664 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.416416 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:04:22 crc kubenswrapper[4753]: W1205 17:04:22.436104 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-9faf8a9afe82f681777cd7c58ab0f8138ba44a40154cbd684e5f949e9e080187 WatchSource:0}: Error finding container 9faf8a9afe82f681777cd7c58ab0f8138ba44a40154cbd684e5f949e9e080187: Status 404 returned error can't find the container with id 9faf8a9afe82f681777cd7c58ab0f8138ba44a40154cbd684e5f949e9e080187 Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.528481 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.530112 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.530163 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.530176 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.530196 4753 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:04:22 crc kubenswrapper[4753]: E1205 17:04:22.530566 4753 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.233:6443: connect: connection refused" node="crc" Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.675821 4753 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.724020 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9faf8a9afe82f681777cd7c58ab0f8138ba44a40154cbd684e5f949e9e080187"} Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.725123 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d13a3c3c825b6cb67aac98a1bb282802df8af0f43cc7c82bf347f6dc3fc8a785"} Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.727644 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8e67ebe8ebfb6b6a4603250c1af063abfeba6156ba2c18c26194291f15198b26"} Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.728505 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f2b82c9b0984822d4653728ea247513afddd788e49b4b3c1fee17eda014310a9"} Dec 05 17:04:22 crc kubenswrapper[4753]: I1205 17:04:22.729215 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"be581f5461390d4124c3fbb1788ee766a0968c0a07dd131d736ed40ae94b9ec9"} Dec 05 17:04:22 crc kubenswrapper[4753]: W1205 17:04:22.732273 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 17:04:22 crc kubenswrapper[4753]: E1205 17:04:22.732354 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:04:22 crc kubenswrapper[4753]: W1205 17:04:22.788495 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 17:04:22 crc kubenswrapper[4753]: E1205 17:04:22.788597 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:04:22 crc kubenswrapper[4753]: W1205 17:04:22.807952 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 17:04:22 crc kubenswrapper[4753]: E1205 17:04:22.808080 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:04:23 crc kubenswrapper[4753]: W1205 17:04:23.044740 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 
38.102.83.233:6443: connect: connection refused Dec 05 17:04:23 crc kubenswrapper[4753]: E1205 17:04:23.044844 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.233:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:04:23 crc kubenswrapper[4753]: E1205 17:04:23.082514 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="1.6s" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.330725 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.332968 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.333042 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.333057 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.333093 4753 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:04:23 crc kubenswrapper[4753]: E1205 17:04:23.333884 4753 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.233:6443: connect: connection refused" node="crc" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.675327 4753 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.233:6443: connect: connection refused Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.734210 4753 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="498f195e044debe4363789dc7c265123a594ed15546a33aff7aeca45163d4a08" exitCode=0 Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.735193 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"498f195e044debe4363789dc7c265123a594ed15546a33aff7aeca45163d4a08"} Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.735385 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.736312 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.736710 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.736778 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.738188 4753 generic.go:334] "Generic (PLEG): container finished" 
podID="f4b27818a5e8e43d0dc095d08835c792" containerID="559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd" exitCode=0 Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.738360 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd"} Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.738357 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.739999 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.740198 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.740534 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.741058 4753 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9" exitCode=0 Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.741111 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9"} Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.741204 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.741999 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.742020 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.742029 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.742461 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.743314 4753 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f" exitCode=0 Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.743359 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f"} Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.743424 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.744400 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.744418 4753 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.744426 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.744754 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.744950 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.745126 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.746930 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83"} Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.747045 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55"} Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.747126 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734"} Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.747258 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d"} Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.747408 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.748295 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.748431 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.748506 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:23 crc kubenswrapper[4753]: I1205 17:04:23.766091 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.754341 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.754869 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969"} Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 
17:04:24.754905 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22"} Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.754917 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5"} Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.755113 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.755135 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.755156 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.757493 4753 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="57f8f3748e2769dfee756be6f04df3aaaa6e9ea410631aaec4c2b0fb36380aba" exitCode=0 Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.757565 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"57f8f3748e2769dfee756be6f04df3aaaa6e9ea410631aaec4c2b0fb36380aba"} Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.757574 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.758307 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.758335 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.758347 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.761842 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907"} Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.761865 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416"} Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.761877 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b"} Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.761888 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce"} Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.765816 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"8012c6347b457094531c3dfc5c7f18bec3f7cebb83d341ab1ed030443d6f4455"} Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.765849 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.765867 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.766962 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.766985 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.766995 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.767441 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.767463 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.767474 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.934023 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.935466 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.935515 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.935528 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.935559 4753 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.976224 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:24 crc kubenswrapper[4753]: I1205 17:04:24.981674 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.770575 4753 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8d0e87a821df2df8d12a9d1ebfc28918e04955cfddb52ecc5fdf17595715ed05" exitCode=0 Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.770669 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.770664 4753 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8d0e87a821df2df8d12a9d1ebfc28918e04955cfddb52ecc5fdf17595715ed05"} Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.772126 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.772166 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.772174 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.775064 4753 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.775053 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02"} Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.775113 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.775095 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.775179 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.775492 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.776886 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.776912 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.776923 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.777004 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.777036 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.777053 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.777422 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.777455 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.777468 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.778222 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.778245 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:25 crc kubenswrapper[4753]: I1205 17:04:25.778256 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.011491 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.353600 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.781225 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d57e4d7cbce488283393f816c9b71f654bf789b6900640a06bd6318b62ed3c51"} Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.781290 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.781368 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.781419 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.782785 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.782807 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.782815 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.782822 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.782848 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:26 crc kubenswrapper[4753]: I1205 17:04:26.782857 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.787187 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.787266 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.787187 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.787183 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2aad9ac38a4559b5429c6e6084e4ac76d82dedf05857d6dda0213c85daade35e"} Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.787469 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a6ab17dafe5558c990006acd7086b246ab4de08cecae110ea0c45c715319105c"} Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.787507 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"fc865793a4e57c985c048cbe1039b14a5d741740398b0668a3357aad17bdc7ee"} Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.787519 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"00ad2b7bc9127cb48d5c66fd08bb103b640668ba8e2f0232e96c7ff35fc81d02"} Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.788005 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.788028 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.788037 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.788085 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.788105 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.788113 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.788806 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.788839 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:27 crc kubenswrapper[4753]: I1205 17:04:27.788850 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:28 crc kubenswrapper[4753]: I1205 17:04:28.680565 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 05 17:04:28 crc kubenswrapper[4753]: I1205 17:04:28.789096 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:28 crc kubenswrapper[4753]: I1205 17:04:28.790080 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:28 crc kubenswrapper[4753]: I1205 17:04:28.790136 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:28 crc kubenswrapper[4753]: I1205 17:04:28.790181 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:29 crc kubenswrapper[4753]: I1205 17:04:29.791971 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:29 crc kubenswrapper[4753]: I1205 17:04:29.793398 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:29 crc kubenswrapper[4753]: I1205 17:04:29.793471 4753 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:29 crc kubenswrapper[4753]: I1205 17:04:29.793491 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.069201 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.222344 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.222577 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.223939 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.223979 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.223998 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.360945 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.361099 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.362130 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.362188 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.362204 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.794033 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.795050 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.795084 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:30 crc kubenswrapper[4753]: I1205 17:04:30.795098 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:31 crc kubenswrapper[4753]: I1205 17:04:31.120710 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:31 crc kubenswrapper[4753]: I1205 17:04:31.120881 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:31 crc kubenswrapper[4753]: I1205 17:04:31.121917 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:31 crc kubenswrapper[4753]: I1205 17:04:31.121948 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:04:31 crc kubenswrapper[4753]: I1205 17:04:31.121959 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:31 crc kubenswrapper[4753]: E1205 17:04:31.833040 4753 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 17:04:33 crc kubenswrapper[4753]: I1205 17:04:33.770899 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:33 crc kubenswrapper[4753]: I1205 17:04:33.771019 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:33 crc kubenswrapper[4753]: I1205 17:04:33.771972 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:33 crc kubenswrapper[4753]: I1205 17:04:33.772007 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:33 crc kubenswrapper[4753]: I1205 17:04:33.772019 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:34 crc kubenswrapper[4753]: I1205 17:04:34.120948 4753 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 17:04:34 crc kubenswrapper[4753]: I1205 17:04:34.121034 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 17:04:34 crc kubenswrapper[4753]: W1205 17:04:34.440398 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 05 17:04:34 crc kubenswrapper[4753]: I1205 17:04:34.440504 4753 trace.go:236] Trace[878799282]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 17:04:24.438) (total time: 10002ms): Dec 05 17:04:34 crc kubenswrapper[4753]: Trace[878799282]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (17:04:34.440) Dec 05 17:04:34 crc kubenswrapper[4753]: Trace[878799282]: [10.002162578s] [10.002162578s] END Dec 05 17:04:34 crc kubenswrapper[4753]: E1205 17:04:34.440525 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 17:04:34 crc kubenswrapper[4753]: I1205 17:04:34.676464 4753 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 05 17:04:34 crc kubenswrapper[4753]: E1205 17:04:34.683894 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Dec 05 17:04:34 crc kubenswrapper[4753]: W1205 17:04:34.699908 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 05 17:04:34 crc kubenswrapper[4753]: I1205 17:04:34.700074 4753 trace.go:236] Trace[999388350]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 17:04:24.699) (total time: 10001ms): Dec 05 17:04:34 crc kubenswrapper[4753]: Trace[999388350]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10000ms (17:04:34.699) Dec 05 17:04:34 crc kubenswrapper[4753]: Trace[999388350]: [10.001013759s] [10.001013759s] END Dec 05 17:04:34 crc kubenswrapper[4753]: E1205 17:04:34.700120 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 17:04:34 crc kubenswrapper[4753]: W1205 17:04:34.896965 4753 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 05 17:04:34 crc kubenswrapper[4753]: I1205 17:04:34.897057 4753 trace.go:236] Trace[1442588608]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 17:04:24.895) (total time: 10001ms): Dec 05 17:04:34 crc kubenswrapper[4753]: Trace[1442588608]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (17:04:34.896) Dec 05 17:04:34 crc kubenswrapper[4753]: Trace[1442588608]: [10.001790022s] [10.001790022s] END Dec 05 17:04:34 crc kubenswrapper[4753]: E1205 17:04:34.897080 4753 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 17:04:34 crc kubenswrapper[4753]: E1205 17:04:34.936533 4753 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 05 17:04:35 crc kubenswrapper[4753]: I1205 17:04:35.554672 4753 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" 
start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 17:04:35 crc kubenswrapper[4753]: I1205 17:04:35.554744 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 17:04:35 crc kubenswrapper[4753]: I1205 17:04:35.561912 4753 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 17:04:35 crc kubenswrapper[4753]: I1205 17:04:35.561986 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.137566 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.139865 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.139926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.139944 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.139975 4753 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:04:38 crc kubenswrapper[4753]: E1205 17:04:38.144930 4753 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.700465 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.700636 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.701569 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.701600 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.701613 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.710987 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.813531 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 
17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.814467 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.814511 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.814522 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.853442 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.853615 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.856032 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.856065 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.856076 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:38 crc kubenswrapper[4753]: I1205 17:04:38.858192 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:04:39 crc kubenswrapper[4753]: I1205 17:04:39.815811 4753 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 05 17:04:39 crc kubenswrapper[4753]: I1205 17:04:39.815863 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:04:39 crc kubenswrapper[4753]: I1205 17:04:39.816899 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:39 crc kubenswrapper[4753]: I1205 17:04:39.816982 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:39 crc kubenswrapper[4753]: I1205 17:04:39.817009 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:39 crc kubenswrapper[4753]: I1205 17:04:39.981114 4753 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.439760 4753 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.548021 4753 trace.go:236] Trace[1084287143]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 17:04:25.912) (total time: 14635ms):
Dec 05 17:04:40 crc kubenswrapper[4753]: Trace[1084287143]: ---"Objects listed" error: 14635ms (17:04:40.547)
Dec 05 17:04:40 crc kubenswrapper[4753]: Trace[1084287143]: [14.635638159s] [14.635638159s] END
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.548051 4753 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.563436 4753 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.612261 4753 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40388->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.612316 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40388->192.168.126.11:17697: read: connection reset by peer"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.612608 4753 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.612650 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.649381 4753 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.677788 4753 apiserver.go:52] "Watching apiserver"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.681336 4753 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.681570 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb"]
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.681901 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.682143 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.682187 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.682208 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.682315 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.682418 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.682722 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.682741 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.682774 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.683871 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.684414 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.684427 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.685006 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.685123 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.685352 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.685356 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.686212 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.686278 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.711366 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.724346 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.735892 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.746163 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.759079 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.769359 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.779474 4753 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.782049 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.792769 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.803530 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.814354 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.821310 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.823161 4753 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02" exitCode=255 Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.823221 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02"} Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.835234 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.848422 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.865982 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866026 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866056 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866078 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866100 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866117 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866133 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " 
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866166 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866182 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866197 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866230 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866997 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866599 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867028 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866624 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867079 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866787 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.866925 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867085 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867103 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867190 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867214 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867214 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867241 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867261 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867278 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867295 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867313 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867330 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867335 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867353 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867349 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867407 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867428 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867445 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867465 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867471 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867485 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867502 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867510 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867518 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867558 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867570 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867600 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867635 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867656 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867672 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867690 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867708 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867725 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867772 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867798 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867817 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867836 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867853 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867871 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867888 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867907 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867924 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867960 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867984 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868005 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868060 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868085 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868109 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868134 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868191 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868218 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868244 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 05 17:04:40
crc kubenswrapper[4753]: I1205 17:04:40.868268 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868291 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868351 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868371 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868395 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868420 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868441 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868463 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868487 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868509 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") 
" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868532 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868592 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868618 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868644 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868700 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868728 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868751 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868773 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867672 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868796 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867680 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867751 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867784 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868826 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868955 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868977 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868998 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869017 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod 
\"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869067 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869087 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869104 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869120 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869140 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869173 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869188 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869203 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869221 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869241 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: 
\"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869257 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869274 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869295 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869313 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869334 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869352 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869370 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869388 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869408 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869428 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: 
\"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869497 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869516 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869533 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869551 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869570 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869588 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869606 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869627 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869645 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869660 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869678 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869696 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869714 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869730 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869747 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869766 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869782 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869798 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869816 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869833 4753 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869850 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869867 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869884 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869902 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869921 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869940 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869958 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869975 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869994 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 
17:04:40.870010 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870027 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870043 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870067 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870087 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870110 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870128 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870734 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870754 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870769 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:04:40 crc kubenswrapper[4753]: 
I1205 17:04:40.870786 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870807 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870823 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870858 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870877 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870893 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870909 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870926 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870941 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870958 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: 
\"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870974 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870991 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871007 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871028 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871065 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871082 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871100 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871117 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871137 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871166 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod 
\"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871183 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871199 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871218 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871236 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871254 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871273 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871290 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871309 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871331 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871348 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: 
\"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871366 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871383 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871401 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871421 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871437 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871454 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871471 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871487 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871504 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871522 4753 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871539 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871556 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871574 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871590 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871609 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871626 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871643 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871658 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871677 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871696 4753 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871713 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871731 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871751 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871768 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871784 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871801 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871917 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871940 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871959 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.871980 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872002 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872022 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872045 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872064 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872081 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872099 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872120 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872139 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872188 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872209 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872278 4753 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872292 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872316 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872328 4753 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872340 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872350 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872362 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872372 4753 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 
17:04:40.872382 4753 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872391 4753 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872402 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872413 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872423 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872434 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872444 4753 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872454 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.889355 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.895362 4753 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867825 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867811 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867798 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867877 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867894 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867955 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.867962 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868082 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868145 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868205 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868292 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868467 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868460 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868566 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868608 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868783 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868814 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868810 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868876 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868897 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868937 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.868967 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869048 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869203 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869258 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869294 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869360 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869464 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869795 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869822 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.869951 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870041 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870059 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870081 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870084 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.870092 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.872465 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.874350 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.875584 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.878337 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.878513 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.881859 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.883080 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.883632 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.885323 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.888331 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.888984 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.889220 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.889317 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.889549 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.889706 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.889778 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.889783 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.889963 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.889988 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.890102 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.890178 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.890254 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.890231 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.890315 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.890447 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.892438 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.892601 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.893374 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.894072 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.894239 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.894314 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.896000 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.896453 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.896637 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.898178 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.899922 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:04:41.399894634 +0000 UTC m=+19.903001640 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.901193 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.904470 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.904859 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.905097 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.905367 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.912904 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.912942 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). 
InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.912984 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.913367 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.917088 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.917206 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.917314 4753 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.917373 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:41.417356209 +0000 UTC m=+19.920463215 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.917737 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.917808 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.918085 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.918230 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.918298 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.918761 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.918812 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919009 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919026 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919184 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919218 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919265 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919332 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919453 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919540 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919545 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919593 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.919744 4753 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919779 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.919912 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:41.419886555 +0000 UTC m=+19.922993561 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.919934 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.920276 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.918648 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.920390 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.920517 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.920811 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.920901 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.920941 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.920984 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.921084 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.921190 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.921214 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.921373 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.921729 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.921903 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.921933 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.921939 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.922003 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.921765 4753 scope.go:117] "RemoveContainer" containerID="0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.922675 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.924201 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.924217 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.924232 4753 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.924291 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:41.424271044 +0000 UTC m=+19.927378050 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.929799 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.929828 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.930244 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.930908 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.931330 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.931553 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.931875 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.932793 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.933207 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.933485 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.933770 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.935385 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.936687 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.936786 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.936818 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.936835 4753 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:40 crc kubenswrapper[4753]: E1205 17:04:40.936957 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:41.436895704 +0000 UTC m=+19.940002710 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.937439 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.938082 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.938698 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.939580 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.940835 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.941030 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.941192 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.941510 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.941586 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.941715 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.941838 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.942585 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.942826 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.942870 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.943205 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.943355 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.943386 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.944695 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.947559 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.947665 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.950669 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.951304 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.951548 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.951701 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.951847 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.951865 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.951895 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.951908 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.952176 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.952263 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.952466 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.952601 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.952815 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.953484 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.953589 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.953606 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.953847 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.953896 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.954100 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.954363 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.954443 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.954828 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.955990 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.957250 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.957277 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.957309 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.957418 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.957531 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.957570 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.959114 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.963313 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.963660 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.963694 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.964333 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.964718 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973054 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973112 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973175 4753 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973187 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973199 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973208 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973218 4753 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973227 4753 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973237 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973246 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973257 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973266 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973275 4753 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973284 4753 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973294 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973302 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973312 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973321 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973331 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973340 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973349 4753 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973360 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: 
\"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973370 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973380 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973389 4753 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973400 4753 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973411 4753 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973422 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973433 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973443 4753 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973453 4753 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973463 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973474 4753 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973483 4753 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973493 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973503 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973513 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973522 4753 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973532 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973541 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973550 4753 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973559 4753 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973568 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973578 4753 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973588 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973597 4753 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973607 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973616 4753 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973625 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973635 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973645 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973656 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973668 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973677 4753 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973686 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973696 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973706 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973715 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973724 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973732 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973741 4753 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973751 4753 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973760 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973773 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973784 4753 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973794 4753 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973806 4753 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973816 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973828 4753 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973837 4753 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973846 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973864 4753 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973874 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973884 4753 reconciler_common.go:293] "Volume 
detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973893 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973902 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973911 4753 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973923 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973932 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973942 4753 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973951 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973960 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973969 4753 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973977 4753 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973987 4753 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.973997 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974007 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974020 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974029 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974038 4753 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974047 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974056 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974065 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974076 4753 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974087 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974095 4753 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974104 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974112 4753 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974121 4753 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974131 4753 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974140 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974162 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974172 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974182 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974192 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974200 4753 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974209 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974219 4753 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974229 4753 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974237 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974246 4753 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974255 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974264 4753 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: 
\"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974273 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974282 4753 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974292 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974301 4753 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974310 4753 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974321 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974329 4753 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974338 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974347 4753 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974356 4753 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974365 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974374 4753 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974382 4753 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974393 4753 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974402 4753 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974411 4753 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974420 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974428 4753 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974437 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974450 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974459 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974469 4753 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974477 4753 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974486 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974496 4753 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974504 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" 
(UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974512 4753 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974521 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974530 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974540 4753 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974550 4753 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974559 4753 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974569 4753 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974578 4753 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974588 4753 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974598 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974607 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974616 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974660 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: 
\"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974669 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974679 4753 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974689 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974700 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974709 4753 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974718 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974727 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974736 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974744 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974753 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974763 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974771 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974781 4753 reconciler_common.go:293] "Volume detached for 
volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974810 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974820 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974829 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974839 4753 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974848 4753 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974860 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974871 4753 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974882 4753 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974892 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974901 4753 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974911 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974920 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974929 4753 reconciler_common.go:293] "Volume detached for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.974978 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.975172 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.980772 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.992819 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:04:40 crc kubenswrapper[4753]: I1205 17:04:40.994287 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.000916 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.007191 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:04:41 crc kubenswrapper[4753]: W1205 17:04:41.011519 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-c7235c54b3f40c13b44788f0315301d1593a5c16ff63a13a00b56765e68a2542 WatchSource:0}: Error finding container c7235c54b3f40c13b44788f0315301d1593a5c16ff63a13a00b56765e68a2542: Status 404 returned error can't find the container with id c7235c54b3f40c13b44788f0315301d1593a5c16ff63a13a00b56765e68a2542 Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.075642 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.076128 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.076181 4753 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.479749 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.479874 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.479910 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.479944 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.479982 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 
17:04:41.480130 4753 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480233 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:42.48021276 +0000 UTC m=+20.983319766 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480300 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:04:42.480293753 +0000 UTC m=+20.983400759 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480376 4753 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480413 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480439 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480450 4753 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480413 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480484 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480490 4753 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod 
openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480471 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:42.480449598 +0000 UTC m=+20.983556604 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480524 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:42.4805156 +0000 UTC m=+20.983622596 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.480536 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:42.480530851 +0000 UTC m=+20.983637857 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.514525 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.519900 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.529535 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.530266 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.543242 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.560830 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.575617 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.590622 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.601551 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.617083 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.628040 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.643299 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.657475 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.668244 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.680701 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.696575 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.710029 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.719867 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.719998 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.723677 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.724291 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.725309 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac11
7eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.725659 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.726300 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.727284 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.727761 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.728353 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.729309 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" 
path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.729914 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.730845 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.731315 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.732365 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.732812 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.733342 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.734271 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.734798 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.735804 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.736239 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.736788 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.737789 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.737772 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.738340 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.739321 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.739802 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.740835 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.741313 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 
17:04:41.741905 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.743125 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.743662 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.744598 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.745053 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.746016 4753 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.746137 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.748232 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.749288 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.749692 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.751282 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.751598 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-che
ck-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.751959 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.752829 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.753685 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.754872 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.755368 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.756397 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.756995 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.757923 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.758477 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.759382 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.759901 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.761080 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.761578 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.762095 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.762708 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.762793 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.763432 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.764003 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.764501 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.772665 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.780993 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.797507 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.820940 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.827449 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"c7235c54b3f40c13b44788f0315301d1593a5c16ff63a13a00b56765e68a2542"} Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.828364 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"db976cd75ef74cd695abebc7d0de4c7e3eccb79978845d51807d3a565826c887"} Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.829930 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"967551699c5e044b9390d25a7319d78bb75a79fba0d05f89e125287009810523"} Dec 05 17:04:41 crc kubenswrapper[4753]: I1205 17:04:41.839380 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 17:04:41 crc kubenswrapper[4753]: E1205 17:04:41.839742 4753 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.486519 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.486637 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.486683 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.486716 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.486810 4753 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.486810 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:04:44.486768855 +0000 UTC m=+22.989875871 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.486915 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.486973 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:44.486947001 +0000 UTC m=+22.990054207 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487003 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487035 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487053 4753 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487084 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487099 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:44.487088396 +0000 UTC m=+22.990195442 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487102 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487124 4753 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487180 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:44.487170379 +0000 UTC m=+22.990277585 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487190 4753 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.487222 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:44.48721544 +0000 UTC m=+22.990322666 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.720761 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.721227 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.720827 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:04:42 crc kubenswrapper[4753]: E1205 17:04:42.721302 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.834191 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3"}
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.834234 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828"}
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.836123 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.838092 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78"}
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.838371 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.840699 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a"}
Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.861379 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.875806 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.888499 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.902255 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.922360 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.935558 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.949906 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-vj5f7"] Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.950244 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-vj5f7" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.951973 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.952375 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.957126 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.969827 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.979402 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:42 crc kubenswrapper[4753]: I1205 17:04:42.989021 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.001810 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.012551 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.023099 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\
\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.032327 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.042029 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.058006 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.070210 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.080206 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.090987 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc2cd\" (UniqueName: \"kubernetes.io/projected/7f38be29-f040-4e7d-9026-36929c0c5cda-kube-api-access-vc2cd\") pod \"node-resolver-vj5f7\" (UID: \"7f38be29-f040-4e7d-9026-36929c0c5cda\") " pod="openshift-dns/node-resolver-vj5f7" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.091023 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7f38be29-f040-4e7d-9026-36929c0c5cda-hosts-file\") pod \"node-resolver-vj5f7\" (UID: \"7f38be29-f040-4e7d-9026-36929c0c5cda\") " pod="openshift-dns/node-resolver-vj5f7" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.191520 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7f38be29-f040-4e7d-9026-36929c0c5cda-hosts-file\") pod \"node-resolver-vj5f7\" (UID: \"7f38be29-f040-4e7d-9026-36929c0c5cda\") " pod="openshift-dns/node-resolver-vj5f7" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.191605 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vc2cd\" (UniqueName: \"kubernetes.io/projected/7f38be29-f040-4e7d-9026-36929c0c5cda-kube-api-access-vc2cd\") pod \"node-resolver-vj5f7\" (UID: \"7f38be29-f040-4e7d-9026-36929c0c5cda\") " pod="openshift-dns/node-resolver-vj5f7" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.191662 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/7f38be29-f040-4e7d-9026-36929c0c5cda-hosts-file\") pod \"node-resolver-vj5f7\" (UID: \"7f38be29-f040-4e7d-9026-36929c0c5cda\") " pod="openshift-dns/node-resolver-vj5f7" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.217501 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc2cd\" (UniqueName: \"kubernetes.io/projected/7f38be29-f040-4e7d-9026-36929c0c5cda-kube-api-access-vc2cd\") pod \"node-resolver-vj5f7\" (UID: \"7f38be29-f040-4e7d-9026-36929c0c5cda\") " pod="openshift-dns/node-resolver-vj5f7" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.264347 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-vj5f7" Dec 05 17:04:43 crc kubenswrapper[4753]: W1205 17:04:43.282504 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f38be29_f040_4e7d_9026_36929c0c5cda.slice/crio-17089bb6a0993e2b5862381021959532149fbed9ae0e297c775ca11afbca5127 WatchSource:0}: Error finding container 17089bb6a0993e2b5862381021959532149fbed9ae0e297c775ca11afbca5127: Status 404 returned error can't find the container with id 17089bb6a0993e2b5862381021959532149fbed9ae0e297c775ca11afbca5127 Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.330254 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-2rg4s"] Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.331174 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-khn68"] Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.331570 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.331605 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-hpl8r"] Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.331715 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.332130 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.334034 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.334192 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.336977 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.337127 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.337218 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.337257 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.337469 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.338189 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.344240 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.344269 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 17:04:43 crc 
kubenswrapper[4753]: I1205 17:04:43.344276 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.344357 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.359963 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.384913 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.421170 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.435018 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.447918 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.463739 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.478687 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.492821 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482
919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.498924 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-cni-dir\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.498963 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-etc-kubernetes\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.498998 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-run-netns\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499027 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmzrw\" (UniqueName: \"kubernetes.io/projected/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-kube-api-access-dmzrw\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499055 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-os-release\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499081 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5sdf\" (UniqueName: \"kubernetes.io/projected/751d4d21-4eb8-4236-bca2-d81f094ff2f8-kube-api-access-l5sdf\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s"
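The "Failed to update status for pod" entries above all fail for the same underlying reason: each status PATCH from the kubelet is intercepted by the pod.network-node-identity.openshift.io admission webhook at https://127.0.0.1:9743, and that webhook's serving certificate expired on 2025-08-24T17:21:41Z, long before the node's clock time of 2025-12-05T17:04:43Z. The sketch below shows one way to confirm the expiry from the node itself, by fetching the certificate and comparing its notAfter to the current time. It is a diagnostic sketch only: it assumes Python with the third-party cryptography package installed and network reachability of the endpoint, none of which come from this log.

    import ssl
    from datetime import datetime, timezone

    from cryptography import x509

    # Endpoint taken from the failing webhook URL in the entries above.
    HOST, PORT = "127.0.0.1", 9743

    # Fetch the serving certificate WITHOUT verifying it (verification is
    # exactly what fails here, so a verifying client would never get this far;
    # get_server_certificate skips validation when no CA bundle is given).
    pem = ssl.get_server_certificate((HOST, PORT))
    cert = x509.load_pem_x509_certificate(pem.encode())

    now = datetime.now(timezone.utc)
    not_after = cert.not_valid_after_utc  # cryptography >= 42; older versions: not_valid_after
    state = "EXPIRED" if now > not_after else "still valid"
    print(f"subject:  {cert.subject.rfc4514_string()}")
    print(f"notAfter: {not_after}")
    print(f"certificate is {state} as of {now}")

Consistent with a cluster resuming past its certificate lifetimes, the kube-apiserver-crc status above shows the kube-apiserver-cert-regeneration-controller container running; certificates of this kind are normally reissued by the cluster's own controllers once the control plane settles.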
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499107 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-system-cni-dir\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499134 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-var-lib-cni-multus\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499190 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/751d4d21-4eb8-4236-bca2-d81f094ff2f8-cni-binary-copy\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499213 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-os-release\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499234 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-cnibin\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499261 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-daemon-config\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499283 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-run-multus-certs\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499403 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzfkz\" (UniqueName: \"kubernetes.io/projected/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-kube-api-access-vzfkz\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68"
Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499473 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-var-lib-kubelet\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r"
Dec 05 
17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499507 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-socket-dir-parent\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499529 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-cni-binary-copy\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499551 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-run-k8s-cni-cncf-io\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499573 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-hostroot\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499609 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-cnibin\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499631 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/751d4d21-4eb8-4236-bca2-d81f094ff2f8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499671 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-system-cni-dir\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499711 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499729 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-rootfs\") pod 
\"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499797 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-proxy-tls\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499822 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-conf-dir\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499841 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-var-lib-cni-bin\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.499893 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-mcd-auth-proxy-config\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.510095 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.525221 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.540992 4753 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f841
10392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.560839 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.576071 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.588946 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600546 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-os-release\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600591 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-var-lib-cni-multus\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600609 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/751d4d21-4eb8-4236-bca2-d81f094ff2f8-cni-binary-copy\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600625 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-cnibin\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600640 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-daemon-config\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600654 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-var-lib-kubelet\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600671 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-run-multus-certs\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600695 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzfkz\" (UniqueName: \"kubernetes.io/projected/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-kube-api-access-vzfkz\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600709 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-socket-dir-parent\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600723 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/751d4d21-4eb8-4236-bca2-d81f094ff2f8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600747 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-cni-binary-copy\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600763 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-run-k8s-cni-cncf-io\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600778 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" 
(UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-hostroot\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600794 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-cnibin\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600807 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-rootfs\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600821 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-system-cni-dir\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600834 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600858 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-proxy-tls\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600873 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-conf-dir\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600891 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-var-lib-cni-bin\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600914 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-mcd-auth-proxy-config\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600929 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-cni-dir\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600943 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-etc-kubernetes\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.600975 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-system-cni-dir\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.601002 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-run-netns\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.601023 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmzrw\" (UniqueName: \"kubernetes.io/projected/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-kube-api-access-dmzrw\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.601048 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-os-release\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.601077 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5sdf\" (UniqueName: \"kubernetes.io/projected/751d4d21-4eb8-4236-bca2-d81f094ff2f8-kube-api-access-l5sdf\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.601670 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-os-release\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.601739 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-rootfs\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.601800 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-var-lib-cni-multus\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") 
" pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.601948 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-system-cni-dir\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602385 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/751d4d21-4eb8-4236-bca2-d81f094ff2f8-cni-binary-copy\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602474 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-etc-kubernetes\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602521 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-var-lib-cni-bin\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602557 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-conf-dir\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602571 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-run-netns\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602616 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-var-lib-kubelet\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602658 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-system-cni-dir\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602669 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-cnibin\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602699 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-run-multus-certs\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602723 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-cni-dir\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602748 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-host-run-k8s-cni-cncf-io\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602757 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-hostroot\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602711 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-os-release\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602785 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/751d4d21-4eb8-4236-bca2-d81f094ff2f8-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602786 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-cnibin\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.602836 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-socket-dir-parent\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.603171 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/751d4d21-4eb8-4236-bca2-d81f094ff2f8-tuning-conf-dir\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.603426 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-multus-daemon-config\") pod \"multus-hpl8r\" (UID: 
\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.603438 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-cni-binary-copy\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.603500 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-mcd-auth-proxy-config\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.605216 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-proxy-tls\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.605599 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.619428 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzfkz\" (UniqueName: \"kubernetes.io/projected/3fc2db1a-9f5e-4f36-b713-1a385f3a2d68-kube-api-access-vzfkz\") pod \"machine-config-daemon-khn68\" (UID: \"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\") " pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.620097 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.623745 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmzrw\" (UniqueName: \"kubernetes.io/projected/6b3d3501-4f16-4375-adf2-fd54b1cd13cf-kube-api-access-dmzrw\") pod \"multus-hpl8r\" (UID: \"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\") " pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.628189 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5sdf\" (UniqueName: \"kubernetes.io/projected/751d4d21-4eb8-4236-bca2-d81f094ff2f8-kube-api-access-l5sdf\") pod \"multus-additional-cni-plugins-2rg4s\" (UID: \"751d4d21-4eb8-4236-bca2-d81f094ff2f8\") " pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.638238 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.646699 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:04:43 crc kubenswrapper[4753]: W1205 17:04:43.661694 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fc2db1a_9f5e_4f36_b713_1a385f3a2d68.slice/crio-77dd580b7095d67051fd5e48b6ff7ba4700cde2fb5314b208e6e14d82938aed3 WatchSource:0}: Error finding container 77dd580b7095d67051fd5e48b6ff7ba4700cde2fb5314b208e6e14d82938aed3: Status 404 returned error can't find the container with id 77dd580b7095d67051fd5e48b6ff7ba4700cde2fb5314b208e6e14d82938aed3 Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.661857 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-hpl8r" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.669130 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.669407 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.692834 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.712734 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.720534 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:43 crc kubenswrapper[4753]: E1205 17:04:43.720633 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.731827 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.748425 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-98fvv"] Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.749671 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.754828 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.755092 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.755129 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.754894 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.754989 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.755026 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.756583 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.762046 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.776474 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.790672 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.804599 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.816591 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.836430 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec
8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.844286 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" event={"ID":"751d4d21-4eb8-4236-bca2-d81f094ff2f8","Type":"ContainerStarted","Data":"bd0f3f1fd2fcbd5096e27136230eff545f34aa99e380030be7f5a97cf7a77d68"} Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.845121 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"77dd580b7095d67051fd5e48b6ff7ba4700cde2fb5314b208e6e14d82938aed3"} Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.846279 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vj5f7" event={"ID":"7f38be29-f040-4e7d-9026-36929c0c5cda","Type":"ContainerStarted","Data":"8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6"} Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.846308 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vj5f7" event={"ID":"7f38be29-f040-4e7d-9026-36929c0c5cda","Type":"ContainerStarted","Data":"17089bb6a0993e2b5862381021959532149fbed9ae0e297c775ca11afbca5127"} Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.848604 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpl8r" event={"ID":"6b3d3501-4f16-4375-adf2-fd54b1cd13cf","Type":"ContainerStarted","Data":"e2df4c1ed975bbf0d6ccb3007897bdb46353684f7f72c6c297ef3b43d7c161cb"} Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.857873 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.877692 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resou
rces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.889199 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.901988 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902710 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-node-log\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902801 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-systemd-units\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902826 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-bin\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902850 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" 
(UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-script-lib\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902870 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902898 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-env-overrides\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902928 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-ovn\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902950 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-etc-openvswitch\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902964 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-openvswitch\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.902987 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-slash\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903005 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-netns\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903020 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-systemd\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903036 4753 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovn-node-metrics-cert\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903109 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-ovn-kubernetes\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903312 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-var-lib-openvswitch\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903339 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-log-socket\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903357 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fgl2\" (UniqueName: \"kubernetes.io/projected/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-kube-api-access-2fgl2\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903375 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-netd\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903419 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-kubelet\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.903437 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-config\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.915230 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.928986 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.944946 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.969707 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:43 crc kubenswrapper[4753]: I1205 17:04:43.987589 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.002791 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004212 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-kubelet\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004262 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-netd\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004285 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-config\") pod \"ovnkube-node-98fvv\" (UID: 
\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004324 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-node-log\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004353 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-systemd-units\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004391 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-bin\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004375 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-kubelet\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004428 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-netd\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004476 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-node-log\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004474 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-systemd-units\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004526 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-script-lib\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004541 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-bin\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004679 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004708 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-env-overrides\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004746 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-ovn\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004782 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004811 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-etc-openvswitch\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004833 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-openvswitch\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004905 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-ovn\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.004992 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-etc-openvswitch\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005016 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-netns\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005040 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-openvswitch\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005170 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-slash\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005214 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-slash\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005214 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-systemd\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005237 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-systemd\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005275 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovn-node-metrics-cert\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005051 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-netns\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005343 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-env-overrides\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005340 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-ovn-kubernetes\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005417 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-ovn-kubernetes\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005507 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-var-lib-openvswitch\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005544 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-log-socket\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005561 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fgl2\" (UniqueName: \"kubernetes.io/projected/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-kube-api-access-2fgl2\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005585 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-var-lib-openvswitch\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.005803 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-log-socket\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.006037 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-config\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.006512 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-script-lib\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.010658 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovn-node-metrics-cert\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.023895 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.024394 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fgl2\" (UniqueName: \"kubernetes.io/projected/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-kube-api-access-2fgl2\") pod \"ovnkube-node-98fvv\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") " pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.038111 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.049565 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba
8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.062515 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.078343 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.082234 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: W1205 17:04:44.090974 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf00ad131_6b85_4d0d_8fa5_1dfbdc7e161a.slice/crio-add9b9ff7a0d70abfbdaffb357f729ced8eb94fde0897438462c649c2503ed16 WatchSource:0}: Error finding container 
add9b9ff7a0d70abfbdaffb357f729ced8eb94fde0897438462c649c2503ed16: Status 404 returned error can't find the container with id add9b9ff7a0d70abfbdaffb357f729ced8eb94fde0897438462c649c2503ed16 Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.101539 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.118688 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.144564 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.164485 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.178618 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.190191 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.511875 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.511963 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.511985 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.512007 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.512029 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512132 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:04:48.512103072 +0000 UTC m=+27.015210078 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512166 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512165 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512188 4753 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512202 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512202 4753 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512218 4753 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512190 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512263 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:48.512243226 +0000 UTC m=+27.015350302 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512265 4753 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512281 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:48.512273697 +0000 UTC m=+27.015380824 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512329 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:48.512289258 +0000 UTC m=+27.015396364 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.512348 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:48.51233935 +0000 UTC m=+27.015446456 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.545654 4753 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.547795 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.547824 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.547833 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.547918 4753 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.554166 4753 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.554463 4753 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.555612 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.555642 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.555654 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.555671 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.555682 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:44Z","lastTransitionTime":"2025-12-05T17:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.578805 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.582208 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.582244 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.582253 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.582267 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.582278 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:44Z","lastTransitionTime":"2025-12-05T17:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.594008 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.597519 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.597557 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.597568 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.597585 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.597601 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:44Z","lastTransitionTime":"2025-12-05T17:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.609978 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.613765 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.613801 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.613810 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.613823 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.613832 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:44Z","lastTransitionTime":"2025-12-05T17:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.626229 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.629723 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.629759 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.629768 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.629786 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.629796 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:44Z","lastTransitionTime":"2025-12-05T17:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.645599 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.645735 4753 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.647348 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.647383 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.647398 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.647419 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.647431 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:44Z","lastTransitionTime":"2025-12-05T17:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.720292 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.720404 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.720429 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:04:44 crc kubenswrapper[4753]: E1205 17:04:44.720653 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.750122 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.750183 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.750195 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.750211 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.750225 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:44Z","lastTransitionTime":"2025-12-05T17:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.851921 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.851965 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.851976 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.851993 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.852004 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:44Z","lastTransitionTime":"2025-12-05T17:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.854130 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.856376 4753 generic.go:334] "Generic (PLEG): container finished" podID="751d4d21-4eb8-4236-bca2-d81f094ff2f8" containerID="aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290" exitCode=0 Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.856455 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" event={"ID":"751d4d21-4eb8-4236-bca2-d81f094ff2f8","Type":"ContainerDied","Data":"aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.866000 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.866033 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.868439 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17" exitCode=0 Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.868555 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.868651 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"add9b9ff7a0d70abfbdaffb357f729ced8eb94fde0897438462c649c2503ed16"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.871361 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpl8r" event={"ID":"6b3d3501-4f16-4375-adf2-fd54b1cd13cf","Type":"ContainerStarted","Data":"8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.871685 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.886343 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.903655 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.919840 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.943200 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.955003 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.955047 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.955084 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.955107 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.955123 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:44Z","lastTransitionTime":"2025-12-05T17:04:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.961450 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.977469 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:44 crc kubenswrapper[4753]: I1205 17:04:44.989450 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.004855 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.017420 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.029496 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"r
esource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.042114 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plu
gin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.059906 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.059943 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.059954 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.059970 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.059980 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.060990 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.075543 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.092258 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc 
kubenswrapper[4753]: I1205 17:04:45.110384 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"Po
dInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.123949 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.138528 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.156752 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.165141 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.165204 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.165214 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.165229 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.165239 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.172740 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 
2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.185699 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"pod
IPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.200219 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c96163
2fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.213499 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.231427 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.250849 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.267001 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.267048 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.267060 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.267076 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.267088 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.271788 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.370297 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.370340 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.370349 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.370364 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.370374 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.472823 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.472866 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.472879 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.472897 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.472908 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.575012 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.575043 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.575052 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.575065 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.575073 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.649245 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-f6qn6"] Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.649661 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.651642 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.651932 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.652119 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.652292 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.662937 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.677539 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.677590 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.677602 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.677620 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.677635 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.677760 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.691828 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7dd
cf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.706441 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.720469 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:45 crc kubenswrapper[4753]: E1205 17:04:45.720750 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.722851 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.738604 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.753412 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.769012 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.779630 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.779661 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.779672 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.779696 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.779707 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.786173 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.802423 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc 
kubenswrapper[4753]: I1205 17:04:45.820272 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"Po
dInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.825801 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g4w6q\" (UniqueName: \"kubernetes.io/projected/693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06-kube-api-access-g4w6q\") pod \"node-ca-f6qn6\" (UID: \"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\") " pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.825846 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06-host\") pod \"node-ca-f6qn6\" (UID: \"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\") " pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.825868 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06-serviceca\") pod \"node-ca-f6qn6\" (UID: \"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\") " pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.832329 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.848065 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.859726 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.876238 4753 generic.go:334] "Generic (PLEG): 
container finished" podID="751d4d21-4eb8-4236-bca2-d81f094ff2f8" containerID="87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a" exitCode=0 Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.876284 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" event={"ID":"751d4d21-4eb8-4236-bca2-d81f094ff2f8","Type":"ContainerDied","Data":"87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.880385 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.880428 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.880442 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.880454 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.880463 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.880473 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.881119 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.881140 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.881170 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.881184 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.881195 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.889034 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.903713 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.922334 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z 
is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.926757 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g4w6q\" (UniqueName: \"kubernetes.io/projected/693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06-kube-api-access-g4w6q\") pod \"node-ca-f6qn6\" (UID: \"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\") " pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.926819 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06-host\") pod \"node-ca-f6qn6\" (UID: \"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\") " pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.926846 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06-serviceca\") pod \"node-ca-f6qn6\" (UID: \"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\") " pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.926948 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06-host\") pod \"node-ca-f6qn6\" (UID: \"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\") " pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.927912 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06-serviceca\") pod \"node-ca-f6qn6\" (UID: \"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\") " pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.931867 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.942324 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.945411 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g4w6q\" (UniqueName: \"kubernetes.io/projected/693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06-kube-api-access-g4w6q\") pod \"node-ca-f6qn6\" (UID: \"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\") " pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.963747 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.983239 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.983274 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.983284 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.983298 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.983310 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:45Z","lastTransitionTime":"2025-12-05T17:04:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:45 crc kubenswrapper[4753]: I1205 17:04:45.992965 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-f6qn6" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.006998 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\
",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: W1205 17:04:46.041770 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod693c26bb_e75f_4f7f_bd2a_bdf7dcb0af06.slice/crio-47992b3d3f1def8c4791fc29c22bb0a9911ff1971a74b6d7b305eef1010468c4 WatchSource:0}: Error finding container 47992b3d3f1def8c4791fc29c22bb0a9911ff1971a74b6d7b305eef1010468c4: Status 404 returned error can't find the container with id 47992b3d3f1def8c4791fc29c22bb0a9911ff1971a74b6d7b305eef1010468c4 Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.049555 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.085957 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.086000 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.086012 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.086031 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.086045 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:46Z","lastTransitionTime":"2025-12-05T17:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.086682 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.178402 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.191014 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.192028 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.192066 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.192101 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.192118 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.192127 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:46Z","lastTransitionTime":"2025-12-05T17:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.213705 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.253625 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.286214 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.295766 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.295820 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.295832 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.295851 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.295864 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:46Z","lastTransitionTime":"2025-12-05T17:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.399740 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.400342 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.400356 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.400385 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.400400 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:46Z","lastTransitionTime":"2025-12-05T17:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.503660 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.503726 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.503743 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.503770 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.503787 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:46Z","lastTransitionTime":"2025-12-05T17:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.607315 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.607389 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.607407 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.607436 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.607453 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:46Z","lastTransitionTime":"2025-12-05T17:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.709925 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.709964 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.709972 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.709986 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.709994 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:46Z","lastTransitionTime":"2025-12-05T17:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.719543 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.719542 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:46 crc kubenswrapper[4753]: E1205 17:04:46.719678 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:04:46 crc kubenswrapper[4753]: E1205 17:04:46.719723 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.812289 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.812344 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.812357 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.812376 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.812387 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:46Z","lastTransitionTime":"2025-12-05T17:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.886409 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-f6qn6" event={"ID":"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06","Type":"ContainerStarted","Data":"02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.886483 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-f6qn6" event={"ID":"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06","Type":"ContainerStarted","Data":"47992b3d3f1def8c4791fc29c22bb0a9911ff1971a74b6d7b305eef1010468c4"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.889228 4753 generic.go:334] "Generic (PLEG): container finished" podID="751d4d21-4eb8-4236-bca2-d81f094ff2f8" containerID="842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa" exitCode=0 Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.889274 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" event={"ID":"751d4d21-4eb8-4236-bca2-d81f094ff2f8","Type":"ContainerDied","Data":"842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.904627 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.916526 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.916573 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.916586 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.916605 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.916620 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:46Z","lastTransitionTime":"2025-12-05T17:04:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.922557 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.943236 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.966712 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z 
is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.979908 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:46 crc kubenswrapper[4753]: I1205 17:04:46.992598 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.007177 4753 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.019063 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.019104 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.019113 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.019130 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.019141 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.021088 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.034125 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.047088 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.063906 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.078696 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.094429 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.104277 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.119637 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.121701 
4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.121752 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.121766 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.121784 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.121798 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.138346 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.152484 4753 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.166131 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc27
6e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.179244 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.192077 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.201963 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.212864 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.224263 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.224309 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.224321 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.224340 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.224354 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.225501 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastS
tate\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.247606 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.292608 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z 
is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.325999 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.326036 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.326044 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.326058 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.326068 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.326500 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.364859 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.1
26.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.404244 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.428843 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.428878 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.428889 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.428903 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.428913 4753 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.530952 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.531015 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.531037 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.531064 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.531087 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.633516 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.633562 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.633575 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.633594 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.633608 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.719998 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:47 crc kubenswrapper[4753]: E1205 17:04:47.720204 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.736282 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.736334 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.736344 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.736361 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.736373 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.838655 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.838793 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.838883 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.838983 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.839081 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.894601 4753 generic.go:334] "Generic (PLEG): container finished" podID="751d4d21-4eb8-4236-bca2-d81f094ff2f8" containerID="f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694" exitCode=0 Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.894706 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" event={"ID":"751d4d21-4eb8-4236-bca2-d81f094ff2f8","Type":"ContainerDied","Data":"f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.913904 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kuber
netes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered 
and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.930281 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.942193 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.942224 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.942232 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.942247 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.942255 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:47Z","lastTransitionTime":"2025-12-05T17:04:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.953579 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.968724 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.981253 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:47 crc kubenswrapper[4753]: I1205 17:04:47.992239 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.004977 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.015611 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.026467 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.039825 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.045507 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.045544 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.045555 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.045576 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.045589 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.058527 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:
04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.083029 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.098900 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 
2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.111495 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.148720 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.148773 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.148785 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.148798 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.148808 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.251452 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.251499 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.251511 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.251533 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.251545 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
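[annotation] Every failed status patch in this stretch dies the same way: the TLS handshake with the network-node-identity webhook on 127.0.0.1:9743 rejects a serving certificate whose notAfter (2025-08-24T17:21:41Z) is months behind the node's clock (2025-12-05T17:04:48Z). A minimal Python sketch of the validity-window comparison being reported, using the two timestamps from the records above (illustrative only, not the actual TLS verifier):

    from datetime import datetime, timezone

    # Timestamps copied from the log records above.
    not_after = datetime(2025, 8, 24, 17, 21, 41, tzinfo=timezone.utc)  # cert notAfter
    now = datetime(2025, 12, 5, 17, 4, 48, tzinfo=timezone.utc)         # kubelet clock

    # Simplified x509 validity check: once 'now' passes notAfter, the
    # handshake fails with "certificate has expired or is not yet valid".
    if now > not_after:
        print(f"certificate expired {now - not_after} ago")
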
Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.354165 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.354204 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.354215 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.354230 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.354239 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.457112 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.457164 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.457173 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.457185 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.457194 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.550539 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.550674 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.550728 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.550768 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.550834 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.550885 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.550910 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.550910 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:04:56.550835789 +0000 UTC m=+35.053942825 (durationBeforeRetry 8s). 
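[annotation] The volume operations above are not retried in a tight loop: nestedpendingoperations pushes the next attempt out by durationBeforeRetry (here 8s: 17:04:48.55 -> 17:04:56.55). A sketch of the doubling-backoff pattern behind that delay; the base and cap constants are assumptions for illustration, not the kubelet's real values:

    # Hypothetical exponential backoff; the logged 8s delay is one step
    # of a doubling series.
    def backoff_delays(base=0.5, factor=2.0, cap=128.0):
        delay = base
        while True:
            yield min(delay, cap)
            delay *= factor

    gen = backoff_delays()
    print([next(gen) for _ in range(6)])  # [0.5, 1.0, 2.0, 4.0, 8.0, 16.0]
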
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.550955 4753 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.550960 4753 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.550925 4753 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.551007 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:56.550990444 +0000 UTC m=+35.054097450 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.551060 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:56.551043406 +0000 UTC m=+35.054150442 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.551085 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:56.551070997 +0000 UTC m=+35.054178043 (durationBeforeRetry 8s). 
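[annotation] TearDown of pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 fails fast because the CSI driver kubevirt.io.hostpath-provisioner has not (yet) re-registered its plugin socket with the restarted kubelet, so there is no client to call. A sketch of that lookup pattern; the driver name and error text are from the log, the registry shape is assumed:

    # Assumed shape of a kubelet-side CSI driver registry.
    registered_csi_drivers = {}  # driver name -> plugin endpoint

    def get_csi_client(driver):
        if driver not in registered_csi_drivers:
            raise RuntimeError(f"driver name {driver} not found in the "
                               "list of registered CSI drivers")
        return registered_csi_drivers[driver]

    # get_csi_client("kubevirt.io.hostpath-provisioner")  # raises, as logged
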
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.551298 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.551348 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.551382 4753 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.551525 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:04:56.551499131 +0000 UTC m=+35.054606357 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.560428 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.560477 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.560495 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.560526 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.560544 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
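[annotation] The kube-api-access-* volumes are projected volumes combining the service-account token with the kube-root-ca.crt and openshift-service-ca.crt configmaps; until those objects land in the kubelet's informer cache, SetUp fails with the aggregated two-error list seen above. A sketch of that aggregation (object names from the log; the loop itself is illustrative):

    # Each missing projection source contributes one error; the bracketed
    # list in the records above is the aggregate.
    sources = ["kube-root-ca.crt", "openshift-service-ca.crt"]
    registered = set()  # informer cache still empty this early in startup

    errors = [f'object "openshift-network-diagnostics"/"{s}" not registered'
              for s in sources if s not in registered]
    if errors:
        print("MountVolume.SetUp failed:", errors)
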
Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.663237 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.663272 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.663284 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.663300 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.663310 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.719993 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.720017 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.720194 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:04:48 crc kubenswrapper[4753]: E1205 17:04:48.720594 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.766056 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.766616 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.766630 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.766679 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.766700 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.869895 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.869939 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.869948 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.869970 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.869982 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.903277 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.906508 4753 generic.go:334] "Generic (PLEG): container finished" podID="751d4d21-4eb8-4236-bca2-d81f094ff2f8" containerID="3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310" exitCode=0 Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.906566 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" event={"ID":"751d4d21-4eb8-4236-bca2-d81f094ff2f8","Type":"ContainerDied","Data":"3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.940722 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z 
is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.957861 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.972703 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.972971 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.973039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.973116 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.973205 4753 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:48Z","lastTransitionTime":"2025-12-05T17:04:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.973792 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:48 crc kubenswrapper[4753]: I1205 17:04:48.987215 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.000430 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.010929 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.024478 4753 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.040216 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.056282 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.069543 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.075537 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.075591 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.075605 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.075629 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.075646 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:49Z","lastTransitionTime":"2025-12-05T17:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.083622 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.098573 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.112056 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.125652 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.178719 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.178760 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.178775 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.178794 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.178809 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:49Z","lastTransitionTime":"2025-12-05T17:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.281455 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.281512 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.281523 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.281548 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.281562 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:49Z","lastTransitionTime":"2025-12-05T17:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.386718 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.386795 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.386815 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.386847 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.386868 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:49Z","lastTransitionTime":"2025-12-05T17:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.490120 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.490220 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.490246 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.490281 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.490305 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:49Z","lastTransitionTime":"2025-12-05T17:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.593117 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.593187 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.593203 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.593222 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.593233 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:49Z","lastTransitionTime":"2025-12-05T17:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.696752 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.696821 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.696842 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.696872 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.696894 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:49Z","lastTransitionTime":"2025-12-05T17:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.720279 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:49 crc kubenswrapper[4753]: E1205 17:04:49.720504 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.799207 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.799259 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.799272 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.799289 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.799301 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:49Z","lastTransitionTime":"2025-12-05T17:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.902727 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.902775 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.902786 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.902807 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.902819 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:49Z","lastTransitionTime":"2025-12-05T17:04:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.911517 4753 generic.go:334] "Generic (PLEG): container finished" podID="751d4d21-4eb8-4236-bca2-d81f094ff2f8" containerID="9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62" exitCode=0 Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.911563 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" event={"ID":"751d4d21-4eb8-4236-bca2-d81f094ff2f8","Type":"ContainerDied","Data":"9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62"} Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.929138 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.949001 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.961951 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:49 crc kubenswrapper[4753]: I1205 17:04:49.981634 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.001912 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.004778 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.004800 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.004808 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.004821 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.004831 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.018516 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.034105 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.046482 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.060827 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.081980 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics 
northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"
host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.100140 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.107302 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.107536 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.107624 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.107694 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.107751 4753 setters.go:603] "Node 
became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.114290 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faa
f92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.127878 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.140200 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.211133 4753 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.211188 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.211199 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.211215 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.211225 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.315409 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.315471 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.315484 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.315506 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.315521 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.418422 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.418497 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.418511 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.418536 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.418554 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.521359 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.521405 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.521419 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.521440 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.521455 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.672734 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.672769 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.672778 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.672791 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.672800 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.721026 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.721037 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:50 crc kubenswrapper[4753]: E1205 17:04:50.721451 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:04:50 crc kubenswrapper[4753]: E1205 17:04:50.721861 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.774799 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.774836 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.774845 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.774859 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.774867 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.879009 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.879517 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.879533 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.879560 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.879576 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.918992 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" event={"ID":"751d4d21-4eb8-4236-bca2-d81f094ff2f8","Type":"ContainerStarted","Data":"1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.924789 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.925309 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.925371 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.932657 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.944758 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.961245 4753 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.964104 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.965516 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.977797 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.982264 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.982305 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.982315 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.982333 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.982348 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:50Z","lastTransitionTime":"2025-12-05T17:04:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:50 crc kubenswrapper[4753]: I1205 17:04:50.993248 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.012879 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.025906 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.039310 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.054295 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.065181 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.080808 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.084687 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.084816 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.084888 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.084956 4753 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.085011 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:51Z","lastTransitionTime":"2025-12-05T17:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.095810 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",
\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.109267 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.126181 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.136820 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 
2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.149666 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.163227 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.177731 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.186916 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.186946 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.186955 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.186968 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.186978 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:51Z","lastTransitionTime":"2025-12-05T17:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.189991 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.211517 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.223157 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.235110 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.254714 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.265769 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.289382 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.289562 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.289579 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.289599 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.289612 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:51Z","lastTransitionTime":"2025-12-05T17:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.293048 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640
e9445b7c66dab106dc505e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.306800 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.322300 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.336620 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2d
c7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.391950 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.391994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.392006 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.392024 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.392033 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:51Z","lastTransitionTime":"2025-12-05T17:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.493983 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.494018 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.494029 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.494045 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.494057 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:51Z","lastTransitionTime":"2025-12-05T17:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.596105 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.596162 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.596175 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.596192 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.596204 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:51Z","lastTransitionTime":"2025-12-05T17:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.698493 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.698543 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.698551 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.698564 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.698578 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:51Z","lastTransitionTime":"2025-12-05T17:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.719884 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:51 crc kubenswrapper[4753]: E1205 17:04:51.720006 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.734631 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"nam
e\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.745041 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.757960 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.771950 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.784804 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.796934 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.800421 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.800451 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.800460 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.800475 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.800484 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:51Z","lastTransitionTime":"2025-12-05T17:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.808510 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.819075 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.837868 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.863295 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.875719 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.889911 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.903127 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.903180 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.903192 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.903211 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.903220 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:51Z","lastTransitionTime":"2025-12-05T17:04:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.903916 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.919380 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:51 crc kubenswrapper[4753]: I1205 17:04:51.929379 4753 prober_manager.go:312] "Failed to 
trigger a manual run" probe="Readiness"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.005501 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.005545 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.005555 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.005572 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.005583 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.107808 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.107839 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.107849 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.107863 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.107874 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.210570 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.210596 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.210604 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.210617 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.210626 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.312798 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.312836 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.312846 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.312862 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.312875 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.415608 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.415643 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.415832 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.415847 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.415857 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.518657 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.518684 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.518692 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.518705 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.518712 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.620555 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.620587 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.620595 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.620608 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.620617 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.720192 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:04:52 crc kubenswrapper[4753]: E1205 17:04:52.720346 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.721257 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:04:52 crc kubenswrapper[4753]: E1205 17:04:52.721432 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.722789 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.722806 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.722815 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.722840 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.722850 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.824787 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.824837 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.824849 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.824865 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.824876 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.927493 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.927539 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.927550 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.927565 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.927574 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:52Z","lastTransitionTime":"2025-12-05T17:04:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:52 crc kubenswrapper[4753]: I1205 17:04:52.931507 4753 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.030118 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.030180 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.030189 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.030203 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.030216 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.132555 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.132591 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.132600 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.132613 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.132624 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.235506 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.235550 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.235564 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.235581 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.235591 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.338905 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.338963 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.338980 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.339001 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.339018 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.441419 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.441459 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.441467 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.441481 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.441491 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.544044 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.544095 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.544107 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.544125 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.544165 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.646469 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.646509 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.646522 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.646538 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.646549 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.719833 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:53 crc kubenswrapper[4753]: E1205 17:04:53.720007 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.749724 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.749807 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.749822 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.749839 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.749851 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.853690 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.853732 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.853741 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.853755 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.853766 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.937789 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/0.log" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.942542 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26" exitCode=1 Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.942609 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.943613 4753 scope.go:117] "RemoveContainer" containerID="c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.956497 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.956549 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.956568 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.956589 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.956604 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:53Z","lastTransitionTime":"2025-12-05T17:04:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.963404 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:53Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:53 crc kubenswrapper[4753]: I1205 17:04:53.984752 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:53Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.003057 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2d
c7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.021568 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640
e9445b7c66dab106dc505e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:53Z\\\",\\\"message\\\":\\\":53.045032 6073 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045574 6073 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045635 6073 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045700 6073 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:04:53.046252 6073 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:04:53.046277 6073 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:04:53.046281 6073 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:04:53.046293 6073 factory.go:656] Stopping watch factory\\\\nI1205 17:04:53.046306 6073 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:04:53.046307 6073 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17:04:53.046324 6073 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174
f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.032969 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.047397 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.059879 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.059933 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.059948 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.059968 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.059982 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.061966 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.081279 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.100976 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.117953 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.136401 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.151897 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.163000 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.163045 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.163058 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.163075 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.163087 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.170427 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.183135 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.265927 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.265955 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.265962 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.265975 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.265984 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.369186 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.369234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.369245 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.369264 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.369274 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.472129 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.472200 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.472232 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.472250 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.472258 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.574979 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.575015 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.575024 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.575039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.575049 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.678022 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.678063 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.678071 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.678087 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.678096 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.719519 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.719519 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:54 crc kubenswrapper[4753]: E1205 17:04:54.719655 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:04:54 crc kubenswrapper[4753]: E1205 17:04:54.719712 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.780739 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.780783 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.780797 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.780815 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.780824 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.883266 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.883303 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.883312 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.883327 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.883337 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.947710 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/0.log" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.949794 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.949923 4753 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.962261 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.974345 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.983986 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.985296 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.985321 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.985330 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.985343 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.985352 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:54Z","lastTransitionTime":"2025-12-05T17:04:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:54 crc kubenswrapper[4753]: I1205 17:04:54.994583 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.005406 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.015804 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.015841 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.015850 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.015880 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.015890 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.018287 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: E1205 17:04:55.029433 4753 kubelet_node_status.go:585] "Error updating node status, will retry" 
err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329b
a568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\
\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.032873 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.032908 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.032917 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.032932 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.032943 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.037248 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:53Z\\\",\\\"message\\\":\\\":53.045032 6073 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045574 6073 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045635 6073 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045700 6073 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:04:53.046252 6073 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:04:53.046277 6073 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:04:53.046281 6073 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:04:53.046293 6073 factory.go:656] Stopping watch factory\\\\nI1205 17:04:53.046306 6073 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:04:53.046307 6073 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17:04:53.046324 6073 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatus
es\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: E1205 17:04:55.044036 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.045812 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.047283 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.047320 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.047329 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.047360 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.047370 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.055536 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: E1205 17:04:55.058239 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6
ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.061640 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.061681 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.061693 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.061718 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.061729 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.066582 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: E1205 17:04:55.076418 4753 kubelet_node_status.go:585] "Error 
updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256
:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"si
zeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":46317936
5},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.080570 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.081553 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.081577 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.081586 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.081598 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.081607 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.092019 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: E1205 17:04:55.093750 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: E1205 17:04:55.093861 4753 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.095284 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
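
Every status update in this stretch of the log dies the same way: the kubelet's POST to the network-node-identity webhook on https://127.0.0.1:9743 fails TLS verification because the webhook's serving certificate expired on 2025-08-24T17:21:41Z, months before the current wall clock of 2025-12-05T17:04:55Z. Below is a minimal sketch of the same validity-window check that crypto/x509 applies during the handshake; the tls.crt filename is an assumption, though the /etc/webhook-cert/ mount point does appear in the network-node-identity pod status further down.

// checkcert.go - minimal sketch: reproduce the validity-window check behind
// the "certificate has expired or is not yet valid" errors in this log.
// The exact file path is an assumption, not taken from the log.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/etc/webhook-cert/tls.crt") // assumed filename
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	// crypto/x509 rejects the handshake when now falls outside this window,
	// which is exactly the condition the kubelet reports above.
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid: current time %s is before %s\n",
			now.Format(time.RFC3339), cert.NotBefore.Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("expired: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}

Renewing that serving certificate (or correcting a skewed clock) would be the precondition for any of the node and pod status patches in this log to go through.
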
event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.095306 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.095314 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.095326 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.095337 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.102081 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.111744 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.197599 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.197633 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.197641 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.197654 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.197662 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.203419 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.214463 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" 
for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.224978 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",
\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.236682 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\
",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.248470 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.259242 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.270326 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.279302 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.289795 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.300104 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.300177 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.300186 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.300199 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.300207 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
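
The NodeNotReady flapping in this excerpt has a second, independent cause: the runtime reports NetworkPluginNotReady until a CNI network config shows up in /etc/kubernetes/cni/net.d/. A minimal sketch of that readiness condition follows; the accepted extensions mirror common CNI loader behaviour and are an assumption, since the log only names the directory.

// cnicheck.go - minimal sketch: the condition behind the NetworkReady=false
// message above is simply whether the CNI conf directory named by the
// kubelet contains a network configuration yet.
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // directory named in the log message
	entries, err := os.ReadDir(dir)
	if err != nil {
		log.Fatal(err)
	}
	found := 0
	for _, e := range entries {
		// Extensions are an assumption based on typical CNI conf loading.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("network config:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration file in", dir)
	}
}

On this node the config is presumably written by the ovnkube-node pod whose status appears at the end of this excerpt; until then the node keeps transitioning to Ready=False with reason KubeletNotReady.
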
Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.300893 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.311907 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.324021 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.340423 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:53Z\\\",\\\"message\\\":\\\":53.045032 6073 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045574 6073 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045635 6073 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045700 6073 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:04:53.046252 6073 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:04:53.046277 6073 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:04:53.046281 6073 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:04:53.046293 6073 factory.go:656] Stopping watch factory\\\\nI1205 17:04:53.046306 6073 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:04:53.046307 6073 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17:04:53.046324 6073 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatus
es\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.350291 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.361639 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.402536 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.402590 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.402606 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.402630 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.402647 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.504939 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.504994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.505005 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.505023 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.505034 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.607379 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.607479 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.607492 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.607507 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.607518 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.709692 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.709731 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.709748 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.709764 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.709774 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.720273 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:55 crc kubenswrapper[4753]: E1205 17:04:55.720410 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.734460 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4"] Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.734880 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.736635 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.738653 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.748903 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.760461 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.784591 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.801855 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.812234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.812271 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.812281 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.812296 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.812308 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.828834 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.839301 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.851437 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.863883 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2d
c7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.879279 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd
5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:53Z\\\",\\\"message\\\":\\\":53.045032 6073 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045574 6073 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045635 6073 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045700 6073 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:04:53.046252 6073 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:04:53.046277 6073 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:04:53.046281 6073 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:04:53.046293 6073 factory.go:656] Stopping watch factory\\\\nI1205 17:04:53.046306 6073 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:04:53.046307 6073 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17:04:53.046324 6073 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatus
es\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.889895 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.898965 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.909656 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apise
rver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.913970 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.914003 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.914015 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.914031 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.914043 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:55Z","lastTransitionTime":"2025-12-05T17:04:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.921235 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.923351 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qh6zg\" (UniqueName: \"kubernetes.io/projected/338d3da9-209c-4ca9-a37d-6ea5731d1622-kube-api-access-qh6zg\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.923442 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/338d3da9-209c-4ca9-a37d-6ea5731d1622-env-overrides\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.923477 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/338d3da9-209c-4ca9-a37d-6ea5731d1622-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.923551 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/338d3da9-209c-4ca9-a37d-6ea5731d1622-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.932455 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:55 crc kubenswrapper[4753]: I1205 17:04:55.944811 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.015816 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.015864 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.015875 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.015893 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.015936 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.024278 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/338d3da9-209c-4ca9-a37d-6ea5731d1622-env-overrides\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.024307 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qh6zg\" (UniqueName: \"kubernetes.io/projected/338d3da9-209c-4ca9-a37d-6ea5731d1622-kube-api-access-qh6zg\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.024332 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/338d3da9-209c-4ca9-a37d-6ea5731d1622-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.024354 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/338d3da9-209c-4ca9-a37d-6ea5731d1622-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.025101 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/338d3da9-209c-4ca9-a37d-6ea5731d1622-env-overrides\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.025277 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/338d3da9-209c-4ca9-a37d-6ea5731d1622-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.029737 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/338d3da9-209c-4ca9-a37d-6ea5731d1622-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.041347 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qh6zg\" (UniqueName: \"kubernetes.io/projected/338d3da9-209c-4ca9-a37d-6ea5731d1622-kube-api-access-qh6zg\") pod \"ovnkube-control-plane-749d76644c-bhvk4\" (UID: \"338d3da9-209c-4ca9-a37d-6ea5731d1622\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.046169 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.121481 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.121520 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.121530 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.121545 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.121555 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.223522 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.223556 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.223564 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.223579 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.223589 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.325821 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.325865 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.325877 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.325893 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.325906 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.428402 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.428445 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.428461 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.428476 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.428485 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.531307 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.531364 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.531379 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.531431 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.531450 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.629442 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.629532 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.629568 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.629601 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.629633 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.629739 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.629790 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.629804 4753 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630030 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:05:12.630007691 +0000 UTC m=+51.133114757 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630109 4753 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630160 4753 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630212 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630245 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630184 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:05:12.630141866 +0000 UTC m=+51.133248972 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630256 4753 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630311 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:05:12.63026718 +0000 UTC m=+51.133374206 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630346 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:05:12.630331832 +0000 UTC m=+51.133438938 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.630364 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:05:12.630357773 +0000 UTC m=+51.133464919 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.633476 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.633498 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.633510 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.633523 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.633531 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.720332 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.720452 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.720506 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.720546 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.735845 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.735879 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.735889 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.735904 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.735915 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.838305 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.838340 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.838349 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.838363 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.838372 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.940841 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.940926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.940946 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.940982 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.940999 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:56Z","lastTransitionTime":"2025-12-05T17:04:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.957117 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" event={"ID":"338d3da9-209c-4ca9-a37d-6ea5731d1622","Type":"ContainerStarted","Data":"097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.957231 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" event={"ID":"338d3da9-209c-4ca9-a37d-6ea5731d1622","Type":"ContainerStarted","Data":"7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.957254 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" event={"ID":"338d3da9-209c-4ca9-a37d-6ea5731d1622","Type":"ContainerStarted","Data":"1ae37b9c7ca9c27b0018676ffd215ceac54652727ed2d92a05b6cd06e63c0651"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.959240 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/1.log" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.959886 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/0.log" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.962896 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970" exitCode=1 Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.962931 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970"} Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.963016 4753 scope.go:117] "RemoveContainer" containerID="c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.963778 4753 scope.go:117] 
"RemoveContainer" containerID="ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970" Dec 05 17:04:56 crc kubenswrapper[4753]: E1205 17:04:56.963933 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.974204 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID
\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:56Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:56 crc kubenswrapper[4753]: I1205 17:04:56.989960 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:56Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.003377 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.016660 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.027891 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.041520 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.043948 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.043994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.044004 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.044023 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.044036 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.053429 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.064047 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.075951 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.089680 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2d
c7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.106599 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd
5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:53Z\\\",\\\"message\\\":\\\":53.045032 6073 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045574 6073 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045635 6073 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045700 6073 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:04:53.046252 6073 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:04:53.046277 6073 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:04:53.046281 6073 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:04:53.046293 6073 factory.go:656] Stopping watch factory\\\\nI1205 17:04:53.046306 6073 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:04:53.046307 6073 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17:04:53.046324 6073 metrics.go:553] Stopping metrics server at address 
\\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatus
es\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.116406 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.126086 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 
17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.135726 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.146262 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.146290 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.146307 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.146320 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.146329 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.147304 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.159425 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.170615 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.171166 4753 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-multus/network-metrics-daemon-jjgfd"] Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.171699 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:04:57 crc kubenswrapper[4753]: E1205 17:04:57.171766 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.187872 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58
b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.200482 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.211283 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.221939 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.231435 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.234368 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhlg5\" (UniqueName: \"kubernetes.io/projected/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-kube-api-access-bhlg5\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.234399 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:04:57 crc kubenswrapper[4753]: 
I1205 17:04:57.242783 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.248554 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.248585 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.248596 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.248611 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.248621 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.254427 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.263790 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.276693 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.293414 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:53Z\\\",\\\"message\\\":\\\":53.045032 6073 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045574 6073 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045635 6073 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045700 6073 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:04:53.046252 6073 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:04:53.046277 6073 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:04:53.046281 6073 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:04:53.046293 6073 factory.go:656] Stopping watch factory\\\\nI1205 17:04:53.046306 6073 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:04:53.046307 6073 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17:04:53.046324 6073 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"tion, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node 
network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 17:04:54.770375 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 17:04:54.770514 6204 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-khn68 in node crc\\\\nI1205 17:04:54.770521 6204 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1205 17:04:54.770512 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-hpl8r\\\\nI1205 17:04:54.770522 6204 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-khn68 after 0 failed attempt(s)\\\\nI1205 17:04:54.770531 6204 default_network_controller.go:776] Recording success event on pod openshift\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.303446 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.313772 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 
17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.323972 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.334065 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.335329 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhlg5\" (UniqueName: \"kubernetes.io/projected/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-kube-api-access-bhlg5\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.335368 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:04:57 crc kubenswrapper[4753]: E1205 17:04:57.335468 4753 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:04:57 crc kubenswrapper[4753]: E1205 17:04:57.335514 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs podName:00ab636b-9cc9-4a6f-8e6e-6442b35280ca nodeName:}" failed. No retries permitted until 2025-12-05 17:04:57.835502378 +0000 UTC m=+36.338609384 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs") pod "network-metrics-daemon-jjgfd" (UID: "00ab636b-9cc9-4a6f-8e6e-6442b35280ca") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.344602 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.350576 4753 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.350604 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.350613 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.350630 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.350640 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.353827 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhlg5\" (UniqueName: \"kubernetes.io/projected/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-kube-api-access-bhlg5\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.357953 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.371517 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.382913 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.399012 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.410841 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.421932 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.434949 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.447447 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.453101 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.453142 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.453165 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.453183 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.453195 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.457611 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.469401 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 
17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.480602 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.494020 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.511791 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:53Z\\\",\\\"message\\\":\\\":53.045032 6073 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045574 6073 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045635 6073 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045700 6073 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:04:53.046252 6073 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:04:53.046277 6073 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:04:53.046281 6073 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:04:53.046293 6073 factory.go:656] Stopping watch factory\\\\nI1205 17:04:53.046306 6073 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:04:53.046307 6073 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17:04:53.046324 6073 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"tion, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node 
network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 17:04:54.770375 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 17:04:54.770514 6204 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-khn68 in node crc\\\\nI1205 17:04:54.770521 6204 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1205 17:04:54.770512 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-hpl8r\\\\nI1205 17:04:54.770522 6204 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-khn68 after 0 failed attempt(s)\\\\nI1205 17:04:54.770531 6204 default_network_controller.go:776] Recording success event on pod openshift\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.521553 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:57Z is after 2025-08-24T17:21:41Z"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.556102 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.556193 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.556207 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.556227 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.556239 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.658608 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.658657 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.658669 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.658689 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.658702 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.719505 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:04:57 crc kubenswrapper[4753]: E1205 17:04:57.719658 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.760778 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.760849 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.760873 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.760903 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
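Both "Failed to update status for pod" entries above fail for the same reason: the network-node-identity webhook's serving certificate expired on 2025-08-24, so every TLS handshake to it fails closed. A minimal sketch of the kind of validity check the client performs (stand-in values taken from the timestamps in the log; this is illustrative, not kubelet's source):

```go
package main

import (
	"crypto/x509"
	"fmt"
	"time"
)

// checkValidity mirrors the x509 rule the log trips over: a certificate is
// only acceptable while the current time lies inside [NotBefore, NotAfter].
func checkValidity(cert *x509.Certificate, now time.Time) error {
	if now.Before(cert.NotBefore) {
		return fmt.Errorf("certificate not yet valid until %s", cert.NotBefore)
	}
	if now.After(cert.NotAfter) {
		// The branch hit above: current time 2025-12-05 is after the
		// 2025-08-24 expiry, so the handshake is rejected.
		return fmt.Errorf("certificate expired at %s", cert.NotAfter)
	}
	return nil
}

func main() {
	// Illustrative stand-in certificate matching the log's dates.
	cert := &x509.Certificate{
		NotBefore: time.Date(2024, 8, 24, 17, 21, 41, 0, time.UTC),
		NotAfter:  time.Date(2025, 8, 24, 17, 21, 41, 0, time.UTC),
	}
	now := time.Date(2025, 12, 5, 17, 4, 57, 0, time.UTC)
	fmt.Println(checkValidity(cert, now))
}
```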
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.760928 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.840048 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:04:57 crc kubenswrapper[4753]: E1205 17:04:57.840271 4753 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:04:57 crc kubenswrapper[4753]: E1205 17:04:57.840382 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs podName:00ab636b-9cc9-4a6f-8e6e-6442b35280ca nodeName:}" failed. No retries permitted until 2025-12-05 17:04:58.840354223 +0000 UTC m=+37.343461319 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs") pod "network-metrics-daemon-jjgfd" (UID: "00ab636b-9cc9-4a6f-8e6e-6442b35280ca") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.863814 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.863877 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.863890 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.863907 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
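Every "NetworkReady=false" heartbeat in this stretch reduces to one condition: no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/, because ovnkube-controller keeps crashing before it can write one. A rough sketch of that style of readiness probe (illustrative only, assuming the conventional CNI file extensions; not kubelet's actual implementation):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// networkReady reports whether any CNI configuration file is present in
// confDir; the kubelet keeps the node NotReady until its network plugin
// drops a config there.
func networkReady(confDir string) (bool, error) {
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(confDir, pat))
		if err != nil {
			return false, err
		}
		if len(matches) > 0 {
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ready, err := networkReady("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("NetworkReady =", ready)
}
```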
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.863919 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.966244 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.966292 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.966305 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.966326 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.966339 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:57Z","lastTransitionTime":"2025-12-05T17:04:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:57 crc kubenswrapper[4753]: I1205 17:04:57.968499 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/1.log"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.068441 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.068483 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.068492 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.068511 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.068523 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.170644 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.170701 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.170712 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.170724 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.170736 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.273400 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.273487 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.273523 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.273555 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.273580 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.375812 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.375853 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.375865 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.375880 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.375896 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.478039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.478103 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.478114 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.478131 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.478163 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.579989 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.580025 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.580033 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.580046 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.580056 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.682724 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.682766 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.682782 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.682799 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.682813 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.720320 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.720412 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:04:58 crc kubenswrapper[4753]: E1205 17:04:58.720447 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.720416 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:04:58 crc kubenswrapper[4753]: E1205 17:04:58.720557 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:04:58 crc kubenswrapper[4753]: E1205 17:04:58.720689 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.785505 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.785558 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.785566 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.785615 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.785633 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.849415 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:04:58 crc kubenswrapper[4753]: E1205 17:04:58.849582 4753 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:04:58 crc kubenswrapper[4753]: E1205 17:04:58.849668 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs podName:00ab636b-9cc9-4a6f-8e6e-6442b35280ca nodeName:}" failed. No retries permitted until 2025-12-05 17:05:00.849649442 +0000 UTC m=+39.352756448 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs") pod "network-metrics-daemon-jjgfd" (UID: "00ab636b-9cc9-4a6f-8e6e-6442b35280ca") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.887699 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.887737 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.887751 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.887769 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
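The metrics-certs mount retries above back off exponentially: durationBeforeRetry goes 1s, then 2s, then 4s further down. A toy sketch of that doubling policy (the cap value is an assumption for illustration, not taken from kubelet):

```go
package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the retry delay on each consecutive failure, up to a
// maximum, matching the 1s -> 2s -> 4s durationBeforeRetry progression above.
func nextBackoff(current, maxDelay time.Duration) time.Duration {
	if current <= 0 {
		return time.Second
	}
	next := current * 2
	if next > maxDelay {
		return maxDelay
	}
	return next
}

func main() {
	d := time.Duration(0)
	for i := 0; i < 5; i++ {
		d = nextBackoff(d, 2*time.Minute) // cap is illustrative
		fmt.Println("retry in", d)
	}
}
```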
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.887780 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.990450 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.990497 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.990509 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.990528 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:58 crc kubenswrapper[4753]: I1205 17:04:58.990546 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:58Z","lastTransitionTime":"2025-12-05T17:04:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.092895 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.092937 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.092947 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.092962 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.092973 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:59Z","lastTransitionTime":"2025-12-05T17:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.197412 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.197468 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.197483 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.197503 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.197516 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:59Z","lastTransitionTime":"2025-12-05T17:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.299516 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.299555 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.299564 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.299578 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.299588 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:59Z","lastTransitionTime":"2025-12-05T17:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.402884 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.402925 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.402936 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.402951 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.402965 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:59Z","lastTransitionTime":"2025-12-05T17:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.505263 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.505317 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.505332 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.505353 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.505371 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:59Z","lastTransitionTime":"2025-12-05T17:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.608413 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.608515 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.608535 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.608564 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.608582 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:59Z","lastTransitionTime":"2025-12-05T17:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.712350 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.712432 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.712456 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.712488 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.712512 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:59Z","lastTransitionTime":"2025-12-05T17:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.720306 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:04:59 crc kubenswrapper[4753]: E1205 17:04:59.720556 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.816333 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.816417 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.816429 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.816447 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.816461 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:59Z","lastTransitionTime":"2025-12-05T17:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.918995 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.919031 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.919042 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.919059 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:04:59 crc kubenswrapper[4753]: I1205 17:04:59.919071 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:04:59Z","lastTransitionTime":"2025-12-05T17:04:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.021304 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.021373 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.021386 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.021407 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.021421 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.123936 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.123994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.124006 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.124026 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.124038 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.226818 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.226857 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.226866 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.226882 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
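The condition payload repeated in every "Node became not ready" entry is a standard NodeCondition object; decoding one makes the fields explicit. A self-contained sketch (the struct is hand-written here to stay dependency-free, mirroring the JSON keys in the log rather than importing the upstream k8s types):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors the keys of the condition JSON logged by setters.go.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s because %s\n", c.Type, c.Status, c.Reason)
}
```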
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.226891 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.328928 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.328979 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.328990 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.329007 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.329020 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.431462 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.431524 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.431547 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.431576 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.431601 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.534536 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.534594 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.534605 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.534623 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Has your network provider started?"} Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.637192 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.637235 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.637243 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.637266 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.637282 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.720274 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.720274 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.720302 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:00 crc kubenswrapper[4753]: E1205 17:05:00.720459 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:00 crc kubenswrapper[4753]: E1205 17:05:00.720629 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:00 crc kubenswrapper[4753]: E1205 17:05:00.720833 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.739540 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.739581 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.739596 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.739615 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.739626 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.842100 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.842168 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.842179 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.842194 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.842206 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.871418 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:00 crc kubenswrapper[4753]: E1205 17:05:00.871578 4753 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:05:00 crc kubenswrapper[4753]: E1205 17:05:00.871628 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs podName:00ab636b-9cc9-4a6f-8e6e-6442b35280ca nodeName:}" failed. No retries permitted until 2025-12-05 17:05:04.871614674 +0000 UTC m=+43.374721680 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs") pod "network-metrics-daemon-jjgfd" (UID: "00ab636b-9cc9-4a6f-8e6e-6442b35280ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.945059 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.945138 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.945196 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.945225 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:00 crc kubenswrapper[4753]: I1205 17:05:00.945245 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:00Z","lastTransitionTime":"2025-12-05T17:05:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.049309 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.049365 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.049376 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.049393 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.049403 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.151805 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.151879 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.151894 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.151923 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.151949 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.255373 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.255424 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.255517 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.255542 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.255560 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.358189 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.358247 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.358262 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.358288 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.358305 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.461559 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.461601 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.461610 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.461624 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.461633 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.564995 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.565027 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.565036 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.565048 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.565057 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.667499 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.667534 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.667544 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.667559 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.668105 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.720232 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:01 crc kubenswrapper[4753]: E1205 17:05:01.720370 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.738233 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.754931 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.769806 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.771177 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.771218 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.771231 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.771252 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.771266 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.782716 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.799031 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.834678 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c8c730b3d1006b876ae704a8e5c67fd685185640e9445b7c66dab106dc505e26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:53Z\\\",\\\"message\\\":\\\":53.045032 6073 reflector.go:311] Stopping reflector *v1.EgressService (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressservice/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045574 6073 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045635 6073 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:04:53.045700 6073 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:04:53.046252 6073 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:04:53.046277 6073 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:04:53.046281 6073 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:04:53.046293 6073 factory.go:656] Stopping watch factory\\\\nI1205 17:04:53.046306 6073 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:04:53.046307 6073 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17:04:53.046324 6073 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"tion, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node 
network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 17:04:54.770375 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 17:04:54.770514 6204 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-khn68 in node crc\\\\nI1205 17:04:54.770521 6204 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1205 17:04:54.770512 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-hpl8r\\\\nI1205 17:04:54.770522 6204 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-khn68 after 0 failed attempt(s)\\\\nI1205 17:04:54.770531 6204 default_network_controller.go:776] Recording success event on pod openshift\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\
"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.848597 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.864499 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 
17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.873827 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.873863 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.873875 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.873893 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.873906 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.878203 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"rea
dy\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.890627 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.903117 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.915700 4753 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.945973 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.960343 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.973244 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.975822 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.975858 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.975871 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.975887 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.975899 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:01Z","lastTransitionTime":"2025-12-05T17:05:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:01 crc kubenswrapper[4753]: I1205 17:05:01.985309 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.078462 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.078503 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.078514 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" 
Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.078529 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.078540 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:02Z","lastTransitionTime":"2025-12-05T17:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.180962 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.181028 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.181037 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.181069 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.181081 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:02Z","lastTransitionTime":"2025-12-05T17:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.284226 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.284280 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.284297 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.284330 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.284383 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:02Z","lastTransitionTime":"2025-12-05T17:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.387345 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.387381 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.387393 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.387406 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.387417 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:02Z","lastTransitionTime":"2025-12-05T17:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.489516 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.489567 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.489579 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.489599 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.489616 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:02Z","lastTransitionTime":"2025-12-05T17:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.592167 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.592209 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.592220 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.592234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.592245 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:02Z","lastTransitionTime":"2025-12-05T17:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.694977 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.695021 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.695030 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.695045 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.695054 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:02Z","lastTransitionTime":"2025-12-05T17:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.720456 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.720482 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:02 crc kubenswrapper[4753]: E1205 17:05:02.720783 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:02 crc kubenswrapper[4753]: E1205 17:05:02.720787 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.720500 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:02 crc kubenswrapper[4753]: E1205 17:05:02.721125 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.733599 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.734519 4753 scope.go:117] "RemoveContainer" containerID="ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970" Dec 05 17:05:02 crc kubenswrapper[4753]: E1205 17:05:02.734750 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.744504 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.755481 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.767275 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.783388 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.797709 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.797763 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:02 crc 
kubenswrapper[4753]: I1205 17:05:02.797772 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.797788 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.797801 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:02Z","lastTransitionTime":"2025-12-05T17:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.802290 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd
5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"tion, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 17:04:54.770375 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 17:04:54.770514 6204 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-khn68 in node crc\\\\nI1205 17:04:54.770521 6204 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1205 17:04:54.770512 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-hpl8r\\\\nI1205 17:04:54.770522 6204 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-khn68 after 0 failed attempt(s)\\\\nI1205 17:04:54.770531 6204 default_network_controller.go:776] Recording success event on pod openshift\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.813328 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.823415 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.834052 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.847681 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.862464 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.874131 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.886277 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.898704 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.899858 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.899899 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.899909 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.899926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.899936 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:02Z","lastTransitionTime":"2025-12-05T17:05:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.913237 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.927075 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:02 crc kubenswrapper[4753]: I1205 17:05:02.938859 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.002256 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.002293 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.002327 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.002343 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.002354 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.106031 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.106108 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.106127 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.106190 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.106210 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.208940 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.209614 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.209687 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.209836 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.209930 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.312622 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.312659 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.312668 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.312682 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.312691 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.415320 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.415362 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.415372 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.415391 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.415403 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.518670 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.518719 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.518730 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.518749 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.518759 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.622408 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.622471 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.622489 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.622522 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.622540 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.719908 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:03 crc kubenswrapper[4753]: E1205 17:05:03.720070 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.725419 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.725459 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.725469 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.725483 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.725493 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.828356 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.828404 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.828420 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.828444 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.828461 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.932067 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.932136 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.932203 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.932234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:03 crc kubenswrapper[4753]: I1205 17:05:03.932256 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:03Z","lastTransitionTime":"2025-12-05T17:05:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.035294 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.035337 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.035348 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.035364 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.035375 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.137935 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.137988 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.138003 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.138025 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.138040 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.241118 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.241192 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.241202 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.241221 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.241231 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.343307 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.343348 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.343361 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.343378 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.343390 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.446039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.446083 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.446103 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.446119 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.446186 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.549397 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.549439 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.549447 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.549462 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.549471 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.651992 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.652029 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.652039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.652053 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.652062 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.720364 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.720415 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.720418 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:04 crc kubenswrapper[4753]: E1205 17:05:04.720489 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:04 crc kubenswrapper[4753]: E1205 17:05:04.720597 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:04 crc kubenswrapper[4753]: E1205 17:05:04.720684 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.754524 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.754551 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.754560 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.754573 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.754582 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.856732 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.856770 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.856779 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.856796 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.856806 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.915342 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:04 crc kubenswrapper[4753]: E1205 17:05:04.915539 4753 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:05:04 crc kubenswrapper[4753]: E1205 17:05:04.915627 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs podName:00ab636b-9cc9-4a6f-8e6e-6442b35280ca nodeName:}" failed. No retries permitted until 2025-12-05 17:05:12.915610106 +0000 UTC m=+51.418717112 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs") pod "network-metrics-daemon-jjgfd" (UID: "00ab636b-9cc9-4a6f-8e6e-6442b35280ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.959415 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.959455 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.959465 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.959480 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:04 crc kubenswrapper[4753]: I1205 17:05:04.959491 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:04Z","lastTransitionTime":"2025-12-05T17:05:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.061298 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.061333 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.061343 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.061356 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.061365 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.163902 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.163931 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.163939 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.163952 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.163960 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.265683 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.265706 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.265714 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.265727 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.265736 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.270834 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.270902 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.270913 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.270930 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.270942 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: E1205 17:05:05.282892 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.286735 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.286769 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.286781 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.286793 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.286802 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: E1205 17:05:05.298548 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.301954 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.301999 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.302017 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.302032 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.302043 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: E1205 17:05:05.315810 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.319696 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.319727 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.319735 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.319748 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.319760 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: E1205 17:05:05.330820 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.333700 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.333730 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.333738 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.333751 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.333760 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: E1205 17:05:05.344324 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:05 crc kubenswrapper[4753]: E1205 17:05:05.344435 4753 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.367587 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.367645 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.367656 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.367670 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.367680 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.469768 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.469798 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.469807 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.469820 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.469831 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.571952 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.571992 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.572008 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.572025 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.572036 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.674656 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.674698 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.674708 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.674724 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.674734 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.720187 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:05 crc kubenswrapper[4753]: E1205 17:05:05.720318 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.777257 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.777302 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.777325 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.777343 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.777356 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.880296 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.880338 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.880493 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.880512 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.880525 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.983527 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.983566 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.983577 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.983590 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:05 crc kubenswrapper[4753]: I1205 17:05:05.983601 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:05Z","lastTransitionTime":"2025-12-05T17:05:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.085829 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.085894 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.085917 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.085946 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.085968 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:06Z","lastTransitionTime":"2025-12-05T17:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.188112 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.188201 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.188216 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.188234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.188247 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:06Z","lastTransitionTime":"2025-12-05T17:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.291033 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.291088 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.291106 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.291127 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.291147 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:06Z","lastTransitionTime":"2025-12-05T17:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.393858 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.393898 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.393909 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.393925 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.393936 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:06Z","lastTransitionTime":"2025-12-05T17:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.496083 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.496143 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.496182 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.496204 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.496219 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:06Z","lastTransitionTime":"2025-12-05T17:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.599085 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.599128 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.599140 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.599174 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.599184 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:06Z","lastTransitionTime":"2025-12-05T17:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.701573 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.701612 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.701621 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.701635 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.701645 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:06Z","lastTransitionTime":"2025-12-05T17:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.720430 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.720446 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.720515 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:06 crc kubenswrapper[4753]: E1205 17:05:06.720550 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:06 crc kubenswrapper[4753]: E1205 17:05:06.720672 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:06 crc kubenswrapper[4753]: E1205 17:05:06.720769 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.804169 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.804204 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.804212 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.804225 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:06 crc kubenswrapper[4753]: I1205 17:05:06.804233 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:06Z","lastTransitionTime":"2025-12-05T17:05:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
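Every entry above traces back to one check: the kubelet asks the runtime whether networking is ready, and the answer is no because /etc/kubernetes/cni/net.d/ holds no network configuration yet (on this cluster the network operator writes that file once it starts; it is not created by hand). A minimal Go sketch of that directory check, stdlib only and purely illustrative, not kubelet source:

    // cnicheck.go: illustrative stand-in for the check behind
    // "no CNI configuration file in /etc/kubernetes/cni/net.d/".
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // path taken from the log message
        // Simplified: the real kubelet also accepts other extensions.
        matches, err := filepath.Glob(filepath.Join(confDir, "*.conflist"))
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        confs, _ := filepath.Glob(filepath.Join(confDir, "*.conf"))
        matches = append(matches, confs...)
        if len(matches) == 0 {
            // This is the state the log is reporting: until the network
            // plugin drops a config file here, the node stays NotReady.
            fmt.Println("no CNI configuration file found; network not ready")
            return
        }
        fmt.Println("CNI config present:", matches)
    }

Once the network operator writes its config into that directory, the NetworkReady check flips and the NotReady heartbeats below stop.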
[... node-status heartbeat entries repeat at ~100 ms intervals from 17:05:06.906870 through 17:05:07.836602 and are elided; the interleaved pod events follow ...]
Dec 05 17:05:07 crc kubenswrapper[4753]: I1205 17:05:07.720041 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:05:07 crc kubenswrapper[4753]: E1205 17:05:07.720293 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[... node-status heartbeat entries repeat at ~100 ms intervals from 17:05:07.939340 through 17:05:08.865864 and are elided; the interleaved pod events follow ...]
Dec 05 17:05:08 crc kubenswrapper[4753]: I1205 17:05:08.719630 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:05:08 crc kubenswrapper[4753]: I1205 17:05:08.719741 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:05:08 crc kubenswrapper[4753]: I1205 17:05:08.720191 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:05:08 crc kubenswrapper[4753]: E1205 17:05:08.720464 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:05:08 crc kubenswrapper[4753]: E1205 17:05:08.720622 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:05:08 crc kubenswrapper[4753]: E1205 17:05:08.720546 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[... node-status heartbeat entries repeat at ~100 ms intervals from 17:05:08.968903 through 17:05:10.109562 and are elided; the interleaved pod events and the final heartbeat block follow ...]
Dec 05 17:05:09 crc kubenswrapper[4753]: I1205 17:05:09.720611 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:05:09 crc kubenswrapper[4753]: E1205 17:05:09.720770 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.212487 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.212855 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.212981 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.213102 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.213276 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:10Z","lastTransitionTime":"2025-12-05T17:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
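The status_manager entries below fail for a different reason: the node-identity webhook's serving certificate expired on 2025-08-24, so every status patch is rejected at the TLS layer before it reaches the API server. A minimal sketch for reading that certificate's validity window from the endpoint named in the log (local debugging only; the skip-verify dial is an assumption, not part of the logged flow):

    // certcheck.go: inspect the serving certificate that the webhook
    // calls below reject with "certificate has expired".
    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
            InsecureSkipVerify: true, // read the cert without trusting it
        })
        if err != nil {
            fmt.Println("dial:", err)
            return
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Println("NotBefore:", cert.NotBefore)
        fmt.Println("NotAfter: ", cert.NotAfter) // log shows 2025-08-24T17:21:41Z
        if time.Now().After(cert.NotAfter) {
            fmt.Println("certificate has expired") // matches the x509 error below
        }
    }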
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.315735 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.315779 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.315791 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.315809 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.315819 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:10Z","lastTransitionTime":"2025-12-05T17:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.365983 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.373795 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.380572 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.392642 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.404442 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: 
Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.418016 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.418051 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.418060 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.418075 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.418086 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:10Z","lastTransitionTime":"2025-12-05T17:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.420824 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e
27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure 
cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.436958 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.449329 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.460560 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.474573 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.487460 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.498685 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.507609 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.520647 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.520687 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.520696 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.520711 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.520720 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:10Z","lastTransitionTime":"2025-12-05T17:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.525415 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd
5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"tion, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 17:04:54.770375 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 17:04:54.770514 6204 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-khn68 in node crc\\\\nI1205 17:04:54.770521 6204 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1205 17:04:54.770512 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-hpl8r\\\\nI1205 17:04:54.770522 6204 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-khn68 after 0 failed attempt(s)\\\\nI1205 17:04:54.770531 6204 default_network_controller.go:776] Recording success event on pod openshift\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.537582 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.549930 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.562819 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-ce
rts\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.577456 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:10Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.623192 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.623273 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:10 crc 
kubenswrapper[4753]: I1205 17:05:10.623295 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.623324 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.623341 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:10Z","lastTransitionTime":"2025-12-05T17:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.719723 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.719753 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.719784 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:10 crc kubenswrapper[4753]: E1205 17:05:10.719869 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:10 crc kubenswrapper[4753]: E1205 17:05:10.719905 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:10 crc kubenswrapper[4753]: E1205 17:05:10.720006 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.726103 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.726136 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.726171 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.726192 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.726204 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:10Z","lastTransitionTime":"2025-12-05T17:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.828538 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.828578 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.828589 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.828607 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.828621 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:10Z","lastTransitionTime":"2025-12-05T17:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.930901 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.930973 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.930997 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.931027 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:10 crc kubenswrapper[4753]: I1205 17:05:10.931051 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:10Z","lastTransitionTime":"2025-12-05T17:05:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.033552 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.033613 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.033632 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.033655 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.033676 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.137023 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.137101 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.137126 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.137196 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.137224 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.240495 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.240569 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.240590 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.240617 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.240637 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.342864 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.342915 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.342930 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.342956 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.342971 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.445532 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.445584 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.445597 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.445617 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.445631 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.548285 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.548337 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.548353 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.548378 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.548394 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.650976 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.651024 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.651036 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.651052 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.651061 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.720363 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:11 crc kubenswrapper[4753]: E1205 17:05:11.720500 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.740412 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.753738 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.753783 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.753793 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.753805 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.753814 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.755657 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.768465 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.781519 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.795255 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\
",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.810507 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.823225 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.837001 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f
8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\
\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.856257 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.856302 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.856319 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.856338 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.856350 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.865404 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"tion, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 17:04:54.770375 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 17:04:54.770514 6204 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-khn68 in node crc\\\\nI1205 17:04:54.770521 6204 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1205 17:04:54.770512 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-hpl8r\\\\nI1205 17:04:54.770522 6204 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-khn68 after 0 failed attempt(s)\\\\nI1205 17:04:54.770531 6204 default_network_controller.go:776] Recording success event on pod openshift\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.881391 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.895762 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.913461 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.928591 4753 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.951354 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.958829 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.959046 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.959217 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.959333 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.959443 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:11Z","lastTransitionTime":"2025-12-05T17:05:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.972682 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:11 crc kubenswrapper[4753]: I1205 17:05:11.992208 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.006826 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:12Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.063057 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.063235 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.063303 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.063340 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.063417 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.165930 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.165969 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.165978 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.165994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.166003 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.268309 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.268356 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.268373 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.268395 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.268410 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.372140 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.372231 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.372243 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.372263 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.372275 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.475326 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.475378 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.475390 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.475406 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.475420 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.578222 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.578283 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.578293 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.578311 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.578322 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.680936 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.681005 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.681027 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.681058 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.681081 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.694265 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.694385 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694399 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 17:05:44.69437457 +0000 UTC m=+83.197481586 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.694435 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.694472 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.694526 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694635 4753 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694652 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694673 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694690 4753 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694709 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694774 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694795 4753 projected.go:194] Error preparing data for projected 
volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694690 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:05:44.694674529 +0000 UTC m=+83.197781545 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694712 4753 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694900 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:05:44.694870085 +0000 UTC m=+83.197977171 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694926 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:05:44.694913916 +0000 UTC m=+83.198021042 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.694950 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:05:44.694936037 +0000 UTC m=+83.198043153 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.719786 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.719874 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.719842 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.720054 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.720270 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.720389 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.783886 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.783912 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.783920 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.783933 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.783942 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.885936 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.886288 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.886399 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.886476 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.886537 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.989752 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.989788 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.989799 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.989813 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.989823 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:12Z","lastTransitionTime":"2025-12-05T17:05:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:12 crc kubenswrapper[4753]: I1205 17:05:12.997464 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.997595 4753 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:05:12 crc kubenswrapper[4753]: E1205 17:05:12.997636 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs podName:00ab636b-9cc9-4a6f-8e6e-6442b35280ca nodeName:}" failed. No retries permitted until 2025-12-05 17:05:28.997621879 +0000 UTC m=+67.500728885 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs") pod "network-metrics-daemon-jjgfd" (UID: "00ab636b-9cc9-4a6f-8e6e-6442b35280ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.092797 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.092860 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.092876 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.092901 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.092915 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:13Z","lastTransitionTime":"2025-12-05T17:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.196396 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.196480 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.196497 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.196519 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.196533 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:13Z","lastTransitionTime":"2025-12-05T17:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.298625 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.298666 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.298679 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.298694 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.298703 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:13Z","lastTransitionTime":"2025-12-05T17:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.401801 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.401840 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.401849 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.401862 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.401871 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:13Z","lastTransitionTime":"2025-12-05T17:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.504940 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.504997 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.505013 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.505036 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.505057 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:13Z","lastTransitionTime":"2025-12-05T17:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.608065 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.608140 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.608333 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.608371 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.608394 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:13Z","lastTransitionTime":"2025-12-05T17:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.711581 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.711635 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.711651 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.711669 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.711682 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:13Z","lastTransitionTime":"2025-12-05T17:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.719869 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:13 crc kubenswrapper[4753]: E1205 17:05:13.720080 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.815024 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.815061 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.815072 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.815182 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.815231 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:13Z","lastTransitionTime":"2025-12-05T17:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.918240 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.918295 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.918304 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.918320 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:13 crc kubenswrapper[4753]: I1205 17:05:13.918329 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:13Z","lastTransitionTime":"2025-12-05T17:05:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.021305 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.021379 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.021396 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.021426 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.021450 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.124636 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.124756 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.124846 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.124880 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.124902 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.228842 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.228907 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.228926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.228952 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.228971 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.331722 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.331754 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.331763 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.331779 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.331791 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.434897 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.435548 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.435568 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.435597 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.435617 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.538602 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.538667 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.538684 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.538710 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.538732 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.642268 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.642338 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.642351 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.642374 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.642389 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.719640 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.719717 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.719783 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:14 crc kubenswrapper[4753]: E1205 17:05:14.719872 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:14 crc kubenswrapper[4753]: E1205 17:05:14.720031 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:14 crc kubenswrapper[4753]: E1205 17:05:14.720261 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.745441 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.745529 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.745544 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.745569 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.745607 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.848701 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.848762 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.848784 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.848810 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.848831 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.951654 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.951733 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.951753 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.951780 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:14 crc kubenswrapper[4753]: I1205 17:05:14.951803 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:14Z","lastTransitionTime":"2025-12-05T17:05:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.054350 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.054394 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.054404 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.054419 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.054429 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.157585 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.157661 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.157682 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.157714 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.157735 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.261227 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.261272 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.261282 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.261304 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.261318 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.350695 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.350807 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.350860 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.350892 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.350947 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: E1205 17:05:15.374783 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.381695 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.381749 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.381769 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.381798 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.381817 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: E1205 17:05:15.399276 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.404661 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.404761 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.404827 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.404865 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.404892 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: E1205 17:05:15.429670 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.437596 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.437639 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.437650 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.437667 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.437682 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: E1205 17:05:15.457959 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.463474 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.463528 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.463544 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.463570 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.463584 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: E1205 17:05:15.483383 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:15 crc kubenswrapper[4753]: E1205 17:05:15.484038 4753 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.490205 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.490288 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.490309 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.490344 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.490367 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.593624 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.593666 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.593675 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.593692 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.593702 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.696513 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.696581 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.696598 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.696621 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.696637 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.719518 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:15 crc kubenswrapper[4753]: E1205 17:05:15.719679 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.799488 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.799537 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.799548 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.799568 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.799580 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.902320 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.902398 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.902409 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.902427 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:15 crc kubenswrapper[4753]: I1205 17:05:15.902438 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:15Z","lastTransitionTime":"2025-12-05T17:05:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.004874 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.004922 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.004930 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.004943 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.004952 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.109074 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.109139 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.109173 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.109198 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.109208 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.212114 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.212244 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.212268 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.212302 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.212323 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.315083 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.315123 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.315132 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.315162 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.315174 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.418000 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.418042 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.418051 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.418066 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.418075 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.521117 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.521176 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.521186 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.521203 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.521213 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.623296 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.623357 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.623373 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.623401 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.623418 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.719628 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.719695 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.719763 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:16 crc kubenswrapper[4753]: E1205 17:05:16.719899 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:16 crc kubenswrapper[4753]: E1205 17:05:16.720011 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:16 crc kubenswrapper[4753]: E1205 17:05:16.720483 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.720931 4753 scope.go:117] "RemoveContainer" containerID="ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.725661 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.725698 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.725711 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.725728 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.725742 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.828222 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.828266 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.828278 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.828296 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.828308 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.930535 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.930578 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.930588 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.930606 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:16 crc kubenswrapper[4753]: I1205 17:05:16.930618 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:16Z","lastTransitionTime":"2025-12-05T17:05:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.033101 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.033134 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.033145 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.033177 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.033187 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.039695 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/1.log" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.042440 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a"} Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.042920 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.054976 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.072481 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.095640 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.110244 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.123470 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.135113 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.135161 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.135171 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.135186 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.135196 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.135955 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.148785 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sh
a256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.160407 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.174985 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.184509 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.194165 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.205137 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.217574 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.235349 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.237075 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.237110 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:17 crc 
kubenswrapper[4753]: I1205 17:05:17.237119 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.237136 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.237165 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.253262 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756
f255afe613932a906bd6047a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"tion, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 17:04:54.770375 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 17:04:54.770514 6204 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-khn68 in node crc\\\\nI1205 17:04:54.770521 6204 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1205 17:04:54.770512 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-hpl8r\\\\nI1205 17:04:54.770522 6204 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-khn68 after 0 failed attempt(s)\\\\nI1205 17:04:54.770531 6204 default_network_controller.go:776] Recording success event on pod 
openshift\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.264074 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.275041 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:17Z is after 2025-08-24T17:21:41Z" Dec 05 
17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.340076    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.340115    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.340128    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.340158    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.340168    4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.442138    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.442193    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.442207    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.442222    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.442232    4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.544645    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.544685    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.544696    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.544710    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.544719    4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.646719    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.646755    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.646763    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.646781    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.646789    4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.719929    4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:05:17 crc kubenswrapper[4753]: E1205 17:05:17.720088    4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.749047    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.749114    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.749129    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.749176    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.749196    4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.850802    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.850851    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.850863    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.850880    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.850892    4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.953277    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.953345    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.953368    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.953396    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:17 crc kubenswrapper[4753]: I1205 17:05:17.953413    4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:17Z","lastTransitionTime":"2025-12-05T17:05:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.047346    4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/2.log"
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.047997    4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/1.log"
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.050937    4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a" exitCode=1
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.050983    4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a"}
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.051020    4753 scope.go:117] "RemoveContainer" containerID="ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970"
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.051582    4753 scope.go:117] "RemoveContainer" containerID="9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a"
Dec 05 17:05:18 crc kubenswrapper[4753]: E1205 17:05:18.051721    4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.054847    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.054888    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.054900    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.054917    4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.054928    4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.063862 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.074990 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.086329 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.095105 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.104103 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:
55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.114035 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92ed
af5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.123227 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.135544 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f
8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\
\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.151670 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756
f255afe613932a906bd6047a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad2339c202e5abc4e88ebbfc57c630d5d76fd6fd5e9487a31159ae3d3ebb9970\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"message\\\":\\\"tion, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:04:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 17:04:54.770375 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1205 17:04:54.770514 6204 ovn.go:134] Ensuring zone local for Pod openshift-machine-config-operator/machine-config-daemon-khn68 in node crc\\\\nI1205 17:04:54.770521 6204 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1205 17:04:54.770512 6204 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/multus-hpl8r\\\\nI1205 17:04:54.770522 6204 obj_retry.go:386] Retry successful for *v1.Pod openshift-machine-config-operator/machine-config-daemon-khn68 after 0 failed attempt(s)\\\\nI1205 17:04:54.770531 6204 default_network_controller.go:776] Recording success event on pod openshift\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:17Z\\\",\\\"message\\\":\\\"all/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:05:17.523236 6490 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:05:17.523267 6490 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 17:05:17.523298 6490 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:05:17.523307 6490 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:05:17.523331 6490 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:05:17.523345 6490 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:05:17.523344 6490 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 17:05:17.523350 6490 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:05:17.523358 6490 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:05:17.523364 6490 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:05:17.523371 6490 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:05:17.523370 6490 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 17:05:17.523388 6490 factory.go:656] 
Stopping watch factory\\\\nI1205 17:05:17.523400 6490 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:05:17.523437 6490 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.156808 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.156841 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.156857 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.156873 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.156884 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.161485 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.171625 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.180615 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.190052 4753 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.205000 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.216720 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.228681 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.239424 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:18Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.259068 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.259105 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.259115 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.259131 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.259162 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.361644 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.361907 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.361919 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.361933 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.361942 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.464364 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.464409 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.464421 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.464439 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.464452 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.566938 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.566978 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.566987 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.567000 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.567009 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.669141 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.669208 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.669220 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.669232 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.669242 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.720259 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:18 crc kubenswrapper[4753]: E1205 17:05:18.720395 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.720462 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.720264 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:18 crc kubenswrapper[4753]: E1205 17:05:18.720549 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:18 crc kubenswrapper[4753]: E1205 17:05:18.720571 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.771711 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.771942 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.772027 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.772113 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.772218 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.873772 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.873817 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.873832 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.873854 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.873869 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.976561 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.976884 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.976954 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.977032 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:18 crc kubenswrapper[4753]: I1205 17:05:18.977099 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:18Z","lastTransitionTime":"2025-12-05T17:05:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.054745 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/2.log" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.058573 4753 scope.go:117] "RemoveContainer" containerID="9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a" Dec 05 17:05:19 crc kubenswrapper[4753]: E1205 17:05:19.058938 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.071337 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.080084 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.080138 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.080170 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.080187 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.080196 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:19Z","lastTransitionTime":"2025-12-05T17:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.084711 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.097472 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.110575 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.125913 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.138954 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.153773 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.169101 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.182296 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.184525 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.184753 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.184908 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.185026 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.185270 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:19Z","lastTransitionTime":"2025-12-05T17:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.191721 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.203049 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.216880 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.228802 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.242261 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.259264 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:17Z\\\",\\\"message\\\":\\\"all/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:05:17.523236 6490 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:05:17.523267 6490 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 17:05:17.523298 6490 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:05:17.523307 6490 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:05:17.523331 6490 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:05:17.523345 6490 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:05:17.523344 6490 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 17:05:17.523350 6490 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:05:17.523358 6490 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:05:17.523364 6490 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:05:17.523371 6490 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:05:17.523370 6490 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 17:05:17.523388 6490 factory.go:656] Stopping watch factory\\\\nI1205 17:05:17.523400 6490 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:05:17.523437 6490 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.268908 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.279894 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:19Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.287562 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.287603 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.287614 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.287630 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.287641 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:19Z","lastTransitionTime":"2025-12-05T17:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.389470 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.389505 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.389515 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.389529 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.389539 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:19Z","lastTransitionTime":"2025-12-05T17:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.491395 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.491442 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.491451 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.491466 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.491476 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:19Z","lastTransitionTime":"2025-12-05T17:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.593646 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.593978 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.594054 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.594121 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.594214 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:19Z","lastTransitionTime":"2025-12-05T17:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.696593 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.697098 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.697184 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.697313 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.697377 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:19Z","lastTransitionTime":"2025-12-05T17:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.720262 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:19 crc kubenswrapper[4753]: E1205 17:05:19.720597 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.799281 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.799333 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.799345 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.799363 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.799372 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:19Z","lastTransitionTime":"2025-12-05T17:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.901344 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.901385 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.901396 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.901411 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:19 crc kubenswrapper[4753]: I1205 17:05:19.901420 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:19Z","lastTransitionTime":"2025-12-05T17:05:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.003779 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.004252 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.004416 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.004571 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.004733 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.107926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.108310 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.108375 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.108445 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.108509 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.210893 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.210946 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.210960 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.210979 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.210988 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.313409 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.313447 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.313464 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.313480 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.313491 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.416564 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.416610 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.416622 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.416638 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.416649 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.518672 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.518716 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.518728 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.518744 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.518755 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.621512 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.621550 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.621560 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.621574 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.621586 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.720401 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.720401 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:20 crc kubenswrapper[4753]: E1205 17:05:20.720602 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:20 crc kubenswrapper[4753]: E1205 17:05:20.720533 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.720401 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:20 crc kubenswrapper[4753]: E1205 17:05:20.720690 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.723403 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.723434 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.723443 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.723456 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.723466 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.825533 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.825595 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.825606 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.825621 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.825630 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.928139 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.928208 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.928224 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.928243 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:20 crc kubenswrapper[4753]: I1205 17:05:20.928256 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:20Z","lastTransitionTime":"2025-12-05T17:05:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.030315 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.030355 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.030367 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.030380 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.030390 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.133362 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.133397 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.133407 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.133422 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.133432 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.235579 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.235640 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.235650 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.235667 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.235677 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.337901 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.337954 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.337971 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.337993 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.338009 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.440143 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.440205 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.440218 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.440234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.440246 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.542926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.542972 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.542983 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.543000 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.543011 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.645783 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.645828 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.645840 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.645858 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.645870 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.719997 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:21 crc kubenswrapper[4753]: E1205 17:05:21.720317 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.744360 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.749091 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.749134 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.749163 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.749185 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.749200 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration 
file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.757553 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Runni
ng\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.775224 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.787993 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.810762 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.824974 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.839654 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.850659 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.850690 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.850705 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.850725 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.850737 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.855675 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.869615 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.883623 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.899182 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.918363 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:17Z\\\",\\\"message\\\":\\\"all/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:05:17.523236 6490 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:05:17.523267 6490 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 17:05:17.523298 6490 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:05:17.523307 6490 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:05:17.523331 6490 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:05:17.523345 6490 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:05:17.523344 6490 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 17:05:17.523350 6490 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:05:17.523358 6490 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:05:17.523364 6490 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:05:17.523371 6490 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:05:17.523370 6490 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 17:05:17.523388 6490 factory.go:656] Stopping watch factory\\\\nI1205 17:05:17.523400 6490 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:05:17.523437 6490 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.932705 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.944625 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.953547 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.953604 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.953617 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.953635 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.953647 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:21Z","lastTransitionTime":"2025-12-05T17:05:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.959625 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.972494 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:21 crc kubenswrapper[4753]: I1205 17:05:21.983308 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:21Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.055398 4753 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.055438 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.055449 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.055466 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.055477 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.157637 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.157670 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.157679 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.157692 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.157700 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.259970 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.260023 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.260032 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.260048 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.260060 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.363130 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.363184 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.363196 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.363209 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.363218 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.466214 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.466255 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.466265 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.466281 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.466291 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.568489 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.568540 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.568551 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.568570 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.568582 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.670902 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.670948 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.670956 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.670971 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.670981 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.719836 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.719836 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:22 crc kubenswrapper[4753]: E1205 17:05:22.719966 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:22 crc kubenswrapper[4753]: E1205 17:05:22.720021 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.719860 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:22 crc kubenswrapper[4753]: E1205 17:05:22.720115 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.773955 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.773990 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.774001 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.774016 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.774028 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.876131 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.876195 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.876206 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.876224 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.876457 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.979444 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.979504 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.979519 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.979542 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:22 crc kubenswrapper[4753]: I1205 17:05:22.979558 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:22Z","lastTransitionTime":"2025-12-05T17:05:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.082729 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.082785 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.082794 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.082811 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.082824 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:23Z","lastTransitionTime":"2025-12-05T17:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.186225 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.186299 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.186320 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.186345 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.186361 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:23Z","lastTransitionTime":"2025-12-05T17:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.289648 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.289729 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.289749 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.289787 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.289810 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:23Z","lastTransitionTime":"2025-12-05T17:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.393864 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.393990 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.394009 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.394035 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.394051 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:23Z","lastTransitionTime":"2025-12-05T17:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.497000 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.497057 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.497069 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.497087 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.497097 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:23Z","lastTransitionTime":"2025-12-05T17:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.599239 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.599290 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.599304 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.599323 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.599335 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:23Z","lastTransitionTime":"2025-12-05T17:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.702814 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.702850 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.702859 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.702874 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.702917 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:23Z","lastTransitionTime":"2025-12-05T17:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.719794 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:23 crc kubenswrapper[4753]: E1205 17:05:23.720141 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.805831 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.805898 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.805921 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.805989 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.806014 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:23Z","lastTransitionTime":"2025-12-05T17:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.908427 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.908468 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.908477 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.908492 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:23 crc kubenswrapper[4753]: I1205 17:05:23.908506 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:23Z","lastTransitionTime":"2025-12-05T17:05:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.010932 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.010979 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.010990 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.011006 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.011017 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.113280 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.113331 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.113342 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.113360 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.113377 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.215454 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.215521 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.215531 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.215544 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.215569 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.317441 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.317499 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.317510 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.317526 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.317536 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.419501 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.419565 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.419574 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.419588 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.419600 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.521809 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.521866 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.521881 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.521897 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.521910 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.624093 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.624174 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.624188 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.624204 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.624216 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.720080 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.720106 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.720098 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:24 crc kubenswrapper[4753]: E1205 17:05:24.720235 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:24 crc kubenswrapper[4753]: E1205 17:05:24.720356 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:24 crc kubenswrapper[4753]: E1205 17:05:24.720413 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.726197 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.726224 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.726232 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.726244 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.726251 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.828777 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.828808 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.828817 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.828832 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.828843 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.931625 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.931842 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.931864 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.931890 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:24 crc kubenswrapper[4753]: I1205 17:05:24.931908 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:24Z","lastTransitionTime":"2025-12-05T17:05:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.034411 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.034447 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.034458 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.034472 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.034480 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.144264 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.144363 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.144382 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.144409 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.144427 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.247649 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.247703 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.247712 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.247729 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.247739 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.349994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.350031 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.350043 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.350059 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.350069 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.453203 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.453246 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.453256 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.453275 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.453287 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.556798 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.556866 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.556884 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.556914 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.556932 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.609922 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.609972 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.609981 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.609999 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.610011 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: E1205 17:05:25.624515 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.628056 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.628099 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.628110 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.628129 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.628161 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: E1205 17:05:25.641412 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.644931 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.644969 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.644979 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.644993 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.645003 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: E1205 17:05:25.657875 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.660974 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.661014 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.661024 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.661042 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.661052 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: E1205 17:05:25.671862 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.675179 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.675213 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.675224 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.675240 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.675251 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: E1205 17:05:25.685840 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:25 crc kubenswrapper[4753]: E1205 17:05:25.685957 4753 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.687487 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.687515 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.687523 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.687537 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.687549 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.720380 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:25 crc kubenswrapper[4753]: E1205 17:05:25.720500 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.789983 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.790033 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.790048 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.790069 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.790086 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.893126 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.893191 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.893202 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.893218 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:25 crc kubenswrapper[4753]: I1205 17:05:25.893231 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:25Z","lastTransitionTime":"2025-12-05T17:05:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.407136 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.407237 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.407249 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.407265 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.407276 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:26Z","lastTransitionTime":"2025-12-05T17:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.509944 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.509988 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.509999 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.510017 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.510029 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:26Z","lastTransitionTime":"2025-12-05T17:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.612086 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.612121 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.612129 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.612141 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.612167 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:26Z","lastTransitionTime":"2025-12-05T17:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.714024 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.714060 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.714069 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.714083 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.714092 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:26Z","lastTransitionTime":"2025-12-05T17:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.719390 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.719426 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:26 crc kubenswrapper[4753]: E1205 17:05:26.719502 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.719390 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:26 crc kubenswrapper[4753]: E1205 17:05:26.719576 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:26 crc kubenswrapper[4753]: E1205 17:05:26.719659 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.816179 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.816236 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.816246 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.816264 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.816276 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:26Z","lastTransitionTime":"2025-12-05T17:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.918856 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.918892 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.918903 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.918920 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:26 crc kubenswrapper[4753]: I1205 17:05:26.918930 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:26Z","lastTransitionTime":"2025-12-05T17:05:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.021591 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.021635 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.021648 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.021666 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.021679 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.124305 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.124342 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.124351 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.124364 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.124373 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.226385 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.226428 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.226444 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.226460 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.226471 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.328774 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.328813 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.328822 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.328839 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.328849 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.431045 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.431100 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.431128 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.431160 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.431172 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.533300 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.533349 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.533361 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.533377 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.533391 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.635685 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.635747 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.635759 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.635780 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.635792 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.719838 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:27 crc kubenswrapper[4753]: E1205 17:05:27.720004 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.737829 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.737884 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.737900 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.737915 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.737925 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.840361 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.840425 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.840443 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.840467 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.840486 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.942353 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.942389 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.942400 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.942414 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:27 crc kubenswrapper[4753]: I1205 17:05:27.942425 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:27Z","lastTransitionTime":"2025-12-05T17:05:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.044737 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.044783 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.044793 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.044828 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.044840 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.147877 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.147950 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.147965 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.148053 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.148076 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.250747 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.250787 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.250796 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.250813 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.250824 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.352756 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.352795 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.352806 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.352820 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.352830 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.455054 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.455092 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.455105 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.455120 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.455132 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.557179 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.557219 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.557227 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.557241 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.557253 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.659393 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.659430 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.659438 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.659452 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.659462 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.719573 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:28 crc kubenswrapper[4753]: E1205 17:05:28.719736 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.719799 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:28 crc kubenswrapper[4753]: E1205 17:05:28.719848 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.719911 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:28 crc kubenswrapper[4753]: E1205 17:05:28.719976 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.760958 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.760986 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.760997 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.761010 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.761020 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.864231 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.864285 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.864302 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.864325 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.864341 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.966946 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.966992 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.967002 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.967020 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:28 crc kubenswrapper[4753]: I1205 17:05:28.967032 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:28Z","lastTransitionTime":"2025-12-05T17:05:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.062896 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:29 crc kubenswrapper[4753]: E1205 17:05:29.063101 4753 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:05:29 crc kubenswrapper[4753]: E1205 17:05:29.063243 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs podName:00ab636b-9cc9-4a6f-8e6e-6442b35280ca nodeName:}" failed. No retries permitted until 2025-12-05 17:06:01.063217797 +0000 UTC m=+99.566325013 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs") pod "network-metrics-daemon-jjgfd" (UID: "00ab636b-9cc9-4a6f-8e6e-6442b35280ca") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.070071 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.070117 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.070139 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.070179 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.070194 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.172273 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.172333 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.172348 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.172368 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.172381 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.274994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.275029 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.275039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.275056 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.275067 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.378824 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.378866 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.378877 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.378895 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.378911 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.481289 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.481354 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.481368 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.481386 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.481399 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.583665 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.583775 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.583792 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.583818 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.583835 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.687015 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.687050 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.687061 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.687076 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.687085 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.719649 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:29 crc kubenswrapper[4753]: E1205 17:05:29.719809 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.789455 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.789493 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.789505 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.789523 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.789532 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.891928 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.891976 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.891988 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.892009 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.892024 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.994599 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.994888 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.994900 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.994917 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:29 crc kubenswrapper[4753]: I1205 17:05:29.994928 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:29Z","lastTransitionTime":"2025-12-05T17:05:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.091017 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/0.log" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.091071 4753 generic.go:334] "Generic (PLEG): container finished" podID="6b3d3501-4f16-4375-adf2-fd54b1cd13cf" containerID="8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7" exitCode=1 Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.091104 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpl8r" event={"ID":"6b3d3501-4f16-4375-adf2-fd54b1cd13cf","Type":"ContainerDied","Data":"8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.091581 4753 scope.go:117] "RemoveContainer" containerID="8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.101196 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.105649 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.105702 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.105719 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.105742 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.105758 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:30Z","lastTransitionTime":"2025-12-05T17:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.112653 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.126050 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\
\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.136457 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.151276 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f
8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\
\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.169750 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756
f255afe613932a906bd6047a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:17Z\\\",\\\"message\\\":\\\"all/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:05:17.523236 6490 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:05:17.523267 6490 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 17:05:17.523298 6490 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:05:17.523307 6490 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:05:17.523331 6490 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:05:17.523345 6490 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:05:17.523344 6490 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 17:05:17.523350 6490 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:05:17.523358 6490 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:05:17.523364 6490 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:05:17.523371 6490 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:05:17.523370 6490 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 17:05:17.523388 6490 factory.go:656] Stopping watch factory\\\\nI1205 17:05:17.523400 6490 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:05:17.523437 6490 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.181848 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.192217 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.203367 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.212823 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.213058 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.213140 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.213269 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.213356 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:30Z","lastTransitionTime":"2025-12-05T17:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.221065 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use 
of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.234825 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.247502 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.259962 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.271576 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.283787 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.297124 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.307451 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:30Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.316451 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.316473 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.316483 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.316496 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.316504 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:30Z","lastTransitionTime":"2025-12-05T17:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.418810 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.418872 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.418891 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.418916 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.418932 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:30Z","lastTransitionTime":"2025-12-05T17:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.521467 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.521818 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.521903 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.522003 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.522090 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:30Z","lastTransitionTime":"2025-12-05T17:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.624639 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.624685 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.624697 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.624716 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.624756 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:30Z","lastTransitionTime":"2025-12-05T17:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.719441 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.719446 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:30 crc kubenswrapper[4753]: E1205 17:05:30.719585 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.719551 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:30 crc kubenswrapper[4753]: E1205 17:05:30.719824 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:30 crc kubenswrapper[4753]: E1205 17:05:30.719918 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.726530 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.726697 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.726798 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.726886 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.726971 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:30Z","lastTransitionTime":"2025-12-05T17:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.829669 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.829700 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.829708 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.829723 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.829734 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:30Z","lastTransitionTime":"2025-12-05T17:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.931956 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.931994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.932004 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.932019 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:30 crc kubenswrapper[4753]: I1205 17:05:30.932028 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:30Z","lastTransitionTime":"2025-12-05T17:05:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.034156 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.034208 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.034217 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.034234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.034244 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.096170 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/0.log" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.096230 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpl8r" event={"ID":"6b3d3501-4f16-4375-adf2-fd54b1cd13cf","Type":"ContainerStarted","Data":"1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.110027 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.121650 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.136904 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.136969 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.136983 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.137000 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.137011 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.138798 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.157854 4753 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:17Z\\\",\\\"message\\\":\\\"all/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:05:17.523236 6490 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:05:17.523267 6490 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 17:05:17.523298 6490 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:05:17.523307 6490 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:05:17.523331 6490 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:05:17.523345 6490 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:05:17.523344 6490 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 17:05:17.523350 6490 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:05:17.523358 6490 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:05:17.523364 6490 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:05:17.523371 6490 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:05:17.523370 6490 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 17:05:17.523388 6490 factory.go:656] Stopping watch factory\\\\nI1205 17:05:17.523400 6490 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:05:17.523437 6490 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.168357 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.178393 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.189444 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\
\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.198572 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.209205 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.220636 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.232407 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.239033 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.239079 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.239090 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.239104 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.239115 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.244083 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.260222 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.272483 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.283574 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.293382 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.304830 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.342352 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.342386 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.342395 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.342409 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.342420 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.444409 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.444464 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.444477 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.444495 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.444506 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.546548 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.546658 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.546673 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.546737 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.546753 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.649142 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.649195 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.649203 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.649243 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.649252 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.720264 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:31 crc kubenswrapper[4753]: E1205 17:05:31.720412 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.721009 4753 scope.go:117] "RemoveContainer" containerID="9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a" Dec 05 17:05:31 crc kubenswrapper[4753]: E1205 17:05:31.721198 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.732108 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308
b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.746794 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.750806 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.750842 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.750854 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.750869 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.750880 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.759893 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.780371 4753 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:17Z\\\",\\\"message\\\":\\\"all/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:05:17.523236 6490 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:05:17.523267 6490 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 17:05:17.523298 6490 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:05:17.523307 6490 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:05:17.523331 6490 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:05:17.523345 6490 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:05:17.523344 6490 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 17:05:17.523350 6490 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:05:17.523358 6490 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:05:17.523364 6490 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:05:17.523371 6490 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:05:17.523370 6490 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 17:05:17.523388 6490 factory.go:656] Stopping watch factory\\\\nI1205 17:05:17.523400 6490 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:05:17.523437 6490 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.791686 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.803210 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.813767 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\
\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.823338 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.834219 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.849376 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.852365 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.852407 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.852415 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.852428 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.852439 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.861765 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.874129 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.886973 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.899577 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.911186 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.920944 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.931856 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:31Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.955054 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.955197 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.955211 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.955255 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:31 crc kubenswrapper[4753]: I1205 17:05:31.955266 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:31Z","lastTransitionTime":"2025-12-05T17:05:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.058461 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.058499 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.058508 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.058522 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.058531 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.161106 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.161202 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.161223 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.161248 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.161266 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.263962 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.264029 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.264040 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.264054 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.264062 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.366483 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.366521 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.366532 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.366547 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.366556 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.468608 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.468647 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.468677 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.468694 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.468707 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.571227 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.571273 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.571282 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.571297 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.571307 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.674183 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.674213 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.674221 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.674236 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.674245 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.719584 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.719863 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.719883 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:32 crc kubenswrapper[4753]: E1205 17:05:32.719963 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:32 crc kubenswrapper[4753]: E1205 17:05:32.720040 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:32 crc kubenswrapper[4753]: E1205 17:05:32.720176 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.776822 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.776863 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.776872 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.776888 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.776901 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.879419 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.879453 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.879461 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.879477 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.879508 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.981455 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.981498 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.981511 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.981525 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:32 crc kubenswrapper[4753]: I1205 17:05:32.981536 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:32Z","lastTransitionTime":"2025-12-05T17:05:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.083567 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.083600 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.083608 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.083622 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.083631 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:33Z","lastTransitionTime":"2025-12-05T17:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.185772 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.185810 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.185822 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.185838 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.185848 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:33Z","lastTransitionTime":"2025-12-05T17:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.288191 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.288224 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.288234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.288247 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.288256 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:33Z","lastTransitionTime":"2025-12-05T17:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.390801 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.390856 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.390871 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.390936 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.390949 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:33Z","lastTransitionTime":"2025-12-05T17:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.493008 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.493051 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.493063 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.493085 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.493097 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:33Z","lastTransitionTime":"2025-12-05T17:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.595864 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.595915 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.595927 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.595948 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.595960 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:33Z","lastTransitionTime":"2025-12-05T17:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.698788 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.698834 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.698917 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.698931 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.698941 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:33Z","lastTransitionTime":"2025-12-05T17:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.720389 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:33 crc kubenswrapper[4753]: E1205 17:05:33.720507 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.801004 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.801052 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.801064 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.801089 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.801105 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:33Z","lastTransitionTime":"2025-12-05T17:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.903372 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.903414 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.903424 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.903438 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:33 crc kubenswrapper[4753]: I1205 17:05:33.903448 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:33Z","lastTransitionTime":"2025-12-05T17:05:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.005738 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.005776 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.005784 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.005805 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.005821 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.108218 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.108261 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.108270 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.108285 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.108294 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.210600 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.210641 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.210650 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.210665 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.210674 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.312843 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.312875 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.312885 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.312905 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.312913 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.415200 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.415254 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.415267 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.415286 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.415300 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.517450 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.517482 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.517493 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.517506 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.517514 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.619440 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.619475 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.619487 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.619509 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.619521 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.720197 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.720232 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.720208 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:34 crc kubenswrapper[4753]: E1205 17:05:34.720331 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:34 crc kubenswrapper[4753]: E1205 17:05:34.720574 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:34 crc kubenswrapper[4753]: E1205 17:05:34.720616 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.721953 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.721978 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.721986 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.722003 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.722022 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.824144 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.824197 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.824207 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.824223 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.824233 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.926906 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.926981 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.926995 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.927014 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:34 crc kubenswrapper[4753]: I1205 17:05:34.927027 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:34Z","lastTransitionTime":"2025-12-05T17:05:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.029728 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.029763 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.029773 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.029785 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.029796 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.133169 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.133207 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.133218 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.133232 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.133242 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.235445 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.235489 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.235498 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.235511 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.235520 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.338412 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.338456 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.338467 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.338482 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.338491 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.443077 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.443133 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.443142 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.443180 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.443189 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.545786 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.545848 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.545864 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.545887 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.545903 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.648124 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.648258 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.648281 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.648303 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.648316 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.719827 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:35 crc kubenswrapper[4753]: E1205 17:05:35.719987 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.750448 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.750508 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.750520 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.750533 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.750542 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.853189 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.853227 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.853235 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.853248 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.853259 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.955866 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.955900 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.955909 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.955925 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.955936 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.997336 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.997373 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.997385 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.997400 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:35 crc kubenswrapper[4753]: I1205 17:05:35.997410 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:35Z","lastTransitionTime":"2025-12-05T17:05:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: E1205 17:05:36.009044 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:35Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.012473 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.012513 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.012563 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.012583 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.012597 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: E1205 17:05:36.024725 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.027652 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.027677 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.027686 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.027699 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.027709 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: E1205 17:05:36.040495 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.044374 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.044426 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.044438 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.044454 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.044464 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: E1205 17:05:36.055706 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.060484 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.060542 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
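[editor's note] The repeated patch failures above all trace to a single TLS error: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, long before the logged clock time of 2025-12-05T17:05:36Z. A quick way to confirm this from the node is to pull the serving certificate and print its validity window. The sketch below is illustrative only, not part of the log: it assumes Python 3 with the cryptography package available on the host, and that the endpoint completes a plain TLS handshake without demanding a client certificate. Host and port are taken from the URL in the log; everything else is an assumption.

    # Illustrative sketch: fetch the webhook's serving certificate and print
    # its validity window. Host/port come from the logged URL
    # (https://127.0.0.1:9743); the cryptography dependency is an assumption.
    import socket
    import ssl

    from cryptography import x509

    HOST, PORT = "127.0.0.1", 9743

    ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ctx.check_hostname = False          # must be disabled before CERT_NONE
    ctx.verify_mode = ssl.CERT_NONE     # skip verification so an expired cert can still be fetched

    with socket.create_connection((HOST, PORT), timeout=5) as sock:
        with ctx.wrap_socket(sock, server_hostname=HOST) as tls:
            der = tls.getpeercert(binary_form=True)  # DER bytes even when unverified

    cert = x509.load_der_x509_certificate(der)
    print("subject:  ", cert.subject.rfc4514_string())
    print("notBefore:", cert.not_valid_before)
    print("notAfter: ", cert.not_valid_after)  # the log implies 2025-08-24 17:21:41 UTC

If notAfter is indeed in the past, rotating the webhook's serving certificate (or the cluster's certificates generally) is the fix; the kubelet itself is behaving correctly by refusing the handshake.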
event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.060560 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.060581 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.060594 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: E1205 17:05:36.076650 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:36 crc kubenswrapper[4753]: E1205 17:05:36.076774 4753 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.078156 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
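[editor's note] The sequence ending here shows the shape of kubelet's node-status update loop: several "Error updating node status, will retry" entries in quick succession (17:05:36.044, .055, .076), then "Unable to update node status" with "update node status exceeds retry count" once the attempt budget is exhausted, after which the whole cycle restarts on the next sync period. A minimal sketch of that bounded-retry pattern follows; it is illustrative only — the function names and the retry count of 5 are assumptions, not kubelet source.

    # Illustrative sketch of the bounded retry visible in the log above;
    # the retry count (5) and all names here are assumptions.
    def update_node_status(patch_once, retries=5):
        """Attempt the node-status PATCH up to `retries` times, mirroring the
        'will retry' / 'exceeds retry count' pair seen in the log."""
        for _ in range(retries):
            try:
                patch_once()
                return True
            except Exception as err:  # e.g. the webhook TLS failure logged above
                print(f"Error updating node status, will retry: {err}")
        print("Unable to update node status: update node status exceeds retry count")
        return False

Note that exhausting the retries is not fatal: the kubelet simply reports the failure and tries again on its next status heartbeat, which is why the same block recurs throughout this log.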
event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.078188 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.078202 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.078220 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.078232 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.181087 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.181133 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.181162 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.181180 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.181193 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.284136 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.284208 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.284224 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.284242 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.284253 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.386520 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.386556 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.386565 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.386581 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.386591 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.488852 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.488902 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.488915 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.488936 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.488948 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.591410 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.591459 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.591472 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.591489 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.591501 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.693319 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.693353 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.693362 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.693376 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.693385 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.719780 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.719817 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.719780 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:36 crc kubenswrapper[4753]: E1205 17:05:36.719926 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:36 crc kubenswrapper[4753]: E1205 17:05:36.719973 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:36 crc kubenswrapper[4753]: E1205 17:05:36.720081 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
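[editor's note] Every NotReady heartbeat in this log carries the same root cause string: no CNI configuration file in /etc/kubernetes/cni/net.d/. The kubelet keeps the node's Ready condition False, and skips sandbox creation for non-host-network pods (the "Error syncing pod, skipping" entries above), until the network plugin writes a .conf or .conflist file into that directory. A trivial presence check one might run while waiting is sketched below; the directory path is the one from the log messages, everything else is illustrative.

    # Illustrative sketch: check whether any CNI network config exists yet.
    # The directory path is taken from the log; the rest is an assumption.
    import glob
    import os

    CNI_DIR = "/etc/kubernetes/cni/net.d"

    confs = sorted(
        glob.glob(os.path.join(CNI_DIR, "*.conf"))
        + glob.glob(os.path.join(CNI_DIR, "*.conflist"))
    )
    if confs:
        print("CNI config present:", confs)
    else:
        print("no CNI configuration file in", CNI_DIR, "- network plugin not started yet")

Once the network provider (here, the OVN-Kubernetes stack that the failing webhook belongs to) starts and writes its config, NetworkReady flips to true and these repeating entries stop.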
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.795305 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.795368 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.795390 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.795419 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.795445 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.898104 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.898161 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.898197 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.898216 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:36 crc kubenswrapper[4753]: I1205 17:05:36.898227 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:36Z","lastTransitionTime":"2025-12-05T17:05:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.000349 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.000384 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.000395 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.000410 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.000419 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.102753 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.102793 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.102805 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.102820 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.102831 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.205892 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.205974 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.205994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.206027 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.206070 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.308921 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.309027 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.309058 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.309099 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.309127 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.412433 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.412506 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.412525 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.412551 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.412572 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.516428 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.516489 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.516502 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.516521 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.516531 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.619744 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.619813 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.619831 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.619858 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.619878 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.720535 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:37 crc kubenswrapper[4753]: E1205 17:05:37.720799 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.723121 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.723235 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.723257 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.723281 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.723304 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.826996 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.827074 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.827093 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.827122 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:37 crc kubenswrapper[4753]: I1205 17:05:37.827142 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
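The "Node became not ready" entries above embed the node's Ready condition as a JSON payload, repeated on every ~100 ms heartbeat. As a hedged illustration only (a hand-rolled struct, not the upstream k8s.io/api NodeCondition type and not kubelet's own code), the quoted payload decodes with the Go standard library; field names are copied from the log text:

package main

import (
	"encoding/json"
	"fmt"
)

// nodeCondition mirrors the fields visible in the "Node became not ready"
// entries above. It is a hand-rolled struct for illustration, not the
// upstream k8s.io/api NodeCondition type.
type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// Payload copied verbatim from the setters.go:603 entry at 17:05:37.827142.
	raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:37Z","lastTransitionTime":"2025-12-05T17:05:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}`

	var c nodeCondition
	if err := json.Unmarshal([]byte(raw), &c); err != nil {
		panic(err)
	}
	fmt.Printf("%s=%s since %s (%s)\n", c.Type, c.Status, c.LastTransitionTime, c.Reason)
}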
Dec 05 17:05:38 crc kubenswrapper[4753]: I1205 17:05:38.720577 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:05:38 crc kubenswrapper[4753]: I1205 17:05:38.720643 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:05:38 crc kubenswrapper[4753]: I1205 17:05:38.720577 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:05:38 crc kubenswrapper[4753]: E1205 17:05:38.720796 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:05:38 crc kubenswrapper[4753]: E1205 17:05:38.720930 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:05:38 crc kubenswrapper[4753]: E1205 17:05:38.721051 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:05:39 crc kubenswrapper[4753]: I1205 17:05:39.720330 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:05:39 crc kubenswrapper[4753]: E1205 17:05:39.720518 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
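Every sync failure above reduces to the same message: no CNI configuration file in /etc/kubernetes/cni/net.d/. A minimal sketch of that kind of readiness check, assuming (illustratively; this is not kubelet's or CRI-O's actual code) that the runtime simply looks for a .conf, .conflist, or .json file in the directory named by the error:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// confDir is the directory named in the kubelet errors above. The extension
// list is an assumption for illustration; CRI runtimes generally accept
// .conf and .conflist network configs (historically also .json).
const confDir = "/etc/kubernetes/cni/net.d"

func main() {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v\n", confDir, err)
		return
	}
	found := false
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("found network config:", filepath.Join(confDir, e.Name()))
			found = true
		}
	}
	if !found {
		// The state this log reports: NetworkReady=false because the
		// directory holds no usable CNI configuration.
		fmt.Println("no CNI configuration file in", confDir)
	}
}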
Dec 05 17:05:40 crc kubenswrapper[4753]: I1205 17:05:40.720272 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:05:40 crc kubenswrapper[4753]: I1205 17:05:40.720288 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:05:40 crc kubenswrapper[4753]: E1205 17:05:40.720455 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:05:40 crc kubenswrapper[4753]: E1205 17:05:40.720548 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:05:40 crc kubenswrapper[4753]: I1205 17:05:40.721399 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:05:40 crc kubenswrapper[4753]: E1205 17:05:40.721787 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
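Past the last of the CNI noise, the status_manager.go entries below record a second, independent fault: the serving certificate of the pod.network-node-identity.openshift.io webhook expired on 2025-08-24T17:21:41Z while the node clock reads 2025-12-05, so every pod status patch dies in the TLS handshake. A hedged sketch of the same validity check the handshake performs; the PEM path is a placeholder, not a path taken from this system:

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Placeholder path; substitute the webhook's serving certificate.
	data, err := os.ReadFile("/path/to/webhook-serving-cert.pem")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	switch {
	case now.After(cert.NotAfter):
		// Matches the log: "certificate has expired or is not yet valid:
		// current time ... is after ...".
		fmt.Printf("expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("not yet valid until %s\n", cert.NotBefore.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}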
Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.720254 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:05:41 crc kubenswrapper[4753]: E1205 17:05:41.720378 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.735651 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/st
atic-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.752354 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.767588 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.767629 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.767649 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.767677 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.767693 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:41Z","lastTransitionTime":"2025-12-05T17:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.770141 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.791451 4753 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36c
dd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-con
troller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:17Z\\\",\\\"message\\\":\\\"all/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:05:17.523236 6490 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:05:17.523267 6490 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 17:05:17.523298 6490 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:05:17.523307 6490 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:05:17.523331 6490 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:05:17.523345 6490 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:05:17.523344 6490 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 17:05:17.523350 6490 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:05:17.523358 6490 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:05:17.523364 6490 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:05:17.523371 6490 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:05:17.523370 6490 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 17:05:17.523388 6490 factory.go:656] Stopping watch factory\\\\nI1205 17:05:17.523400 6490 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:05:17.523437 6490 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.804532 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.818299 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.831312 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\
\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.843941 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.857603 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.870223 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.872502 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.872525 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.872532 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.872547 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.872559 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:41Z","lastTransitionTime":"2025-12-05T17:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.882828 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.895485 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.909695 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.923408 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.936818 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.947820 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.959856 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.975878 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.975918 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.975932 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.975955 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:41 crc kubenswrapper[4753]: I1205 17:05:41.975970 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:41Z","lastTransitionTime":"2025-12-05T17:05:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.079350 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.079396 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.079409 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.079430 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.079445 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:42Z","lastTransitionTime":"2025-12-05T17:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.182391 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.182440 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.182455 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.182478 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.182495 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:42Z","lastTransitionTime":"2025-12-05T17:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.287060 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.287132 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.287181 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.287211 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.287230 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:42Z","lastTransitionTime":"2025-12-05T17:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.390746 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.391471 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.391517 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.391555 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.391578 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:42Z","lastTransitionTime":"2025-12-05T17:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.495680 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.495723 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.495733 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.495750 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.495761 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:42Z","lastTransitionTime":"2025-12-05T17:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.598916 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.598998 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.599011 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.599032 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.599046 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:42Z","lastTransitionTime":"2025-12-05T17:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.702734 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.702798 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.702811 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.702835 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.702850 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:42Z","lastTransitionTime":"2025-12-05T17:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.719741 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.719800 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.719948 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:42 crc kubenswrapper[4753]: E1205 17:05:42.720099 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:42 crc kubenswrapper[4753]: E1205 17:05:42.720278 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:42 crc kubenswrapper[4753]: E1205 17:05:42.720518 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.731608 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.805586 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.805661 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.805680 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.805708 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.805727 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:42Z","lastTransitionTime":"2025-12-05T17:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.908874 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.908922 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.908934 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.908954 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:42 crc kubenswrapper[4753]: I1205 17:05:42.908984 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:42Z","lastTransitionTime":"2025-12-05T17:05:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.011954 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.012012 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.012024 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.012041 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.012057 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.114870 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.114926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.114939 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.114958 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.114972 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.218607 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.219305 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.219491 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.219668 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.219827 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.323825 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.324003 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.324032 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.324062 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.324096 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.426968 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.427022 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.427068 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.427089 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.427105 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.530038 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.530100 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.530118 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.530141 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.530177 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.633298 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.633343 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.633361 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.633380 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.633393 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.719927 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:43 crc kubenswrapper[4753]: E1205 17:05:43.720201 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.721202 4753 scope.go:117] "RemoveContainer" containerID="9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.736004 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.736233 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.736346 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.736398 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.736412 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.840110 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.840628 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.840652 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.840683 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.840705 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.943888 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.943952 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.943967 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.943989 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:43 crc kubenswrapper[4753]: I1205 17:05:43.944002 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:43Z","lastTransitionTime":"2025-12-05T17:05:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.048023 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.048121 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.048186 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.048225 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.048254 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.150826 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.151115 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.151211 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.151321 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.151395 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.255683 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.255756 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.255777 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.255804 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.255824 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.358907 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.358960 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.358971 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.358989 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.359000 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.462620 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.462676 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.462700 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.462730 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.462754 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.565831 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.565893 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.565913 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.565943 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.565961 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.668770 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.668826 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.668850 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.668878 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.668893 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.719775 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.719804 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.719925 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.720091 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.720416 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.720487 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.738366 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.738455 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.738475 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.738515 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.738540 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738650 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738667 4753 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738670 4753 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738747 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.738729701 +0000 UTC m=+147.241836697 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738682 4753 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738782 4753 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738803 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.738777363 +0000 UTC m=+147.241884379 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738881 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.738862685 +0000 UTC m=+147.241969701 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738903 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.738894006 +0000 UTC m=+147.242001032 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738949 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738963 4753 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.738977 4753 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:05:44 crc kubenswrapper[4753]: E1205 17:05:44.739007 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.738998559 +0000 UTC m=+147.242105575 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.771212 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.771249 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.771260 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.771275 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.771305 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.874004 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.874039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.874049 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.874061 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.874070 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.977020 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.977052 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.977063 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.977080 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:44 crc kubenswrapper[4753]: I1205 17:05:44.977092 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:44Z","lastTransitionTime":"2025-12-05T17:05:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.079119 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.079167 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.079178 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.079192 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.079202 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.143056 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/2.log" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.145258 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.145623 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.156249 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 
17:05:45.168501 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.178650 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69636b03-af7a-49ce-b136-0ea61ea1bc56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8012c6347b457094531c3dfc5c7f18bec3f7cebb83d341ab1ed030443d6f4455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or 
is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.181253 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.181304 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.181316 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.181335 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.181347 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.192484 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de259712
6bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.206587 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.225925 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f
8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\
\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.245442 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a20782d19103ad754e8771413e6ca949a706a68
76c2e949437969541c112d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:17Z\\\",\\\"message\\\":\\\"all/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:05:17.523236 6490 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:05:17.523267 6490 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 17:05:17.523298 6490 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:05:17.523307 6490 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:05:17.523331 6490 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:05:17.523345 6490 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:05:17.523344 6490 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 17:05:17.523350 6490 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:05:17.523358 6490 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:05:17.523364 6490 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:05:17.523371 6490 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:05:17.523370 6490 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 17:05:17.523388 6490 factory.go:656] Stopping watch factory\\\\nI1205 17:05:17.523400 6490 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:05:17.523437 6490 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 
17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.259566 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.273339 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.283386 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.283424 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.283434 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.283449 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.283458 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.283849 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.296496 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.308110 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.320177 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.334239 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.345085 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.356237 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.366836 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.375286 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.384665 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.384689 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.384697 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.384710 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.384720 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.487083 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.487129 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.487138 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.487186 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.487198 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.588739 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.588775 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.588784 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.588799 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.588808 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.691584 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.691650 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.691663 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.691682 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.691695 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.719824 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:45 crc kubenswrapper[4753]: E1205 17:05:45.720009 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.793775 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.793820 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.793832 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.793848 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.793858 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.896810 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.896859 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.896870 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.896885 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.896895 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.999248 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.999295 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.999331 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.999351 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:45 crc kubenswrapper[4753]: I1205 17:05:45.999362 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:45Z","lastTransitionTime":"2025-12-05T17:05:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.749057 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:46 crc kubenswrapper[4753]: E1205 17:05:46.749505 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.749608 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.749671 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:46 crc kubenswrapper[4753]: E1205 17:05:46.749828 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:46 crc kubenswrapper[4753]: E1205 17:05:46.750350 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.751239 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.751267 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.751277 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.751294 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.751307 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:46Z","lastTransitionTime":"2025-12-05T17:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.752249 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.752299 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.752313 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.752333 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.752349 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:46Z","lastTransitionTime":"2025-12-05T17:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.754872 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/3.log" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.755429 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/2.log" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.758077 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24" exitCode=1 Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.758115 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24"} Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.758166 4753 scope.go:117] "RemoveContainer" containerID="9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.758984 4753 scope.go:117] "RemoveContainer" containerID="7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24" Dec 05 17:05:46 crc kubenswrapper[4753]: E1205 17:05:46.759211 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" Dec 05 17:05:46 crc kubenswrapper[4753]: E1205 17:05:46.772976 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee8805
1c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.776844 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.777644 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.777676 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.777687 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.777706 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.777716 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:46Z","lastTransitionTime":"2025-12-05T17:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.791920 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: E1205 17:05:46.793954 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.797775 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.797811 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.797822 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.797839 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.797852 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:46Z","lastTransitionTime":"2025-12-05T17:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.809120 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f
27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.819729 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.819786 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.819798 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.819820 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.819831 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:46Z","lastTransitionTime":"2025-12-05T17:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:46 crc kubenswrapper[4753]: E1205 17:05:46.832938 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.835521 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b6e87477fcc60efe5e27b6865aa0f91f58bb756f255afe613932a906bd6047a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:17Z\\\",\\\"message\\\":\\\"all/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:05:17.523236 6490 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 17:05:17.523267 6490 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 17:05:17.523298 6490 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:05:17.523307 6490 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:05:17.523331 6490 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:05:17.523345 6490 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 17:05:17.523344 6490 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 17:05:17.523350 6490 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 17:05:17.523358 6490 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:05:17.523364 6490 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:05:17.523371 6490 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:05:17.523370 6490 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 17:05:17.523388 6490 factory.go:656] Stopping watch factory\\\\nI1205 17:05:17.523400 6490 ovnkube.go:599] Stopped ovnkube\\\\nI1205 17:05:17.523437 6490 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 17\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:45Z\\\",\\\"message\\\":\\\"y.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-2rg4s\\\\nI1205 17:05:45.473503 6840 transact.go:42] Configuring OVN: [{Op:update 
Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/package-server-manager-metrics]} name:Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:05:45.473553 6840 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-2rg4s in node crc\\\\nI1205 17:05:45.473547 6840 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mut\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/opensh
ift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.837345 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.837377 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.837387 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.837404 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.837415 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:46Z","lastTransitionTime":"2025-12-05T17:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.847680 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: E1205 17:05:46.849222 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: E1205 17:05:46.849414 4753 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.858450 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.858481 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.858492 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.858509 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.858520 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:46Z","lastTransitionTime":"2025-12-05T17:05:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.861407 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.876830 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69636b03-af7a-49ce-b136-0ea61ea1bc56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8012c6347b457094531c3dfc5c7f18bec3f7cebb83d341ab1ed030443d6f4455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.890645 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.904694 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is 
after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.916378 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.928429 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.942039 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.957854 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.961469 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.961556 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.961576 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.961606 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.961630 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:46Z","lastTransitionTime":"2025-12-05T17:05:46Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.972750 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-
05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:46 crc kubenswrapper[4753]: I1205 17:05:46.987645 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.000888 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.011017 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.025651 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.065099 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.065141 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.065175 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.065191 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.065205 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.168046 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.168081 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.168091 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.168106 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.168115 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.270628 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.270674 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.270686 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.270702 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.270714 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.373964 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.374019 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.374042 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.374068 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.374085 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.477004 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.477041 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.477051 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.477067 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.477077 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.580603 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.580642 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.580652 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.580666 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.580675 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.682790 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.682832 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.682849 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.682870 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.682886 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.720251 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:47 crc kubenswrapper[4753]: E1205 17:05:47.720408 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.762726 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/3.log" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.767403 4753 scope.go:117] "RemoveContainer" containerID="7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24" Dec 05 17:05:47 crc kubenswrapper[4753]: E1205 17:05:47.767716 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.781740 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\
\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.785584 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.785622 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.785637 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.785656 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.785672 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.795404 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.810350 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.831815 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.848562 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.870555 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.883059 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.887502 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.887541 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.887550 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.887566 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.887576 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.896650 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.910347 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.923206 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.940273 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f
8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\
\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\"
,\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.958362 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a20782d19103ad754e8771413e6ca949a706a68
76c2e949437969541c112d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:45Z\\\",\\\"message\\\":\\\"y.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-2rg4s\\\\nI1205 17:05:45.473503 6840 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/package-server-manager-metrics]} name:Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:05:45.473553 6840 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-2rg4s in node crc\\\\nI1205 17:05:45.473547 6840 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mut\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.969958 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.983179 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.990255 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.990284 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.990294 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.990311 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.990321 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:47Z","lastTransitionTime":"2025-12-05T17:05:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:47 crc kubenswrapper[4753]: I1205 17:05:47.996237 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69636b03-af7a-49ce-b136-0ea61ea1bc56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8012c6347b457094531c3dfc5c7f18bec3f7cebb83d341ab1ed030443d6f4455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.007494 4753 
status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:05:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.018289 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.033219 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.092758 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.092837 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.092855 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.092880 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.092898 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:48Z","lastTransitionTime":"2025-12-05T17:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.195858 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.195923 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.195944 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.195969 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.195987 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:48Z","lastTransitionTime":"2025-12-05T17:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.298342 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.298478 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.298494 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.298516 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.298532 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:48Z","lastTransitionTime":"2025-12-05T17:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.401342 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.401401 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.401427 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.401455 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.401473 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:48Z","lastTransitionTime":"2025-12-05T17:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.504041 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.504126 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.504182 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.504217 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.504238 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:48Z","lastTransitionTime":"2025-12-05T17:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.607246 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.607315 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.607332 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.607353 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.607364 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:48Z","lastTransitionTime":"2025-12-05T17:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.709657 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.709687 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.709697 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.709710 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.709719 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:48Z","lastTransitionTime":"2025-12-05T17:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.719340 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:48 crc kubenswrapper[4753]: E1205 17:05:48.719440 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.719556 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:48 crc kubenswrapper[4753]: E1205 17:05:48.719602 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.719699 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:48 crc kubenswrapper[4753]: E1205 17:05:48.719741 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.813038 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.813099 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.813115 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.813140 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.813176 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:48Z","lastTransitionTime":"2025-12-05T17:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.916184 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.916270 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.916296 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.916334 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:48 crc kubenswrapper[4753]: I1205 17:05:48.916358 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:48Z","lastTransitionTime":"2025-12-05T17:05:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.019172 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.019233 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.019253 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.019279 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.019297 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.122530 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.122611 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.122636 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.122671 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.122692 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.225855 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.225916 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.225935 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.225967 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.225991 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.330057 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.330128 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.330144 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.330219 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.330233 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.432736 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.432833 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.432857 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.432929 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.432953 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.536890 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.537052 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.537084 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.537122 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.537181 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.640771 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.640863 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.640892 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.640925 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.640948 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.720389 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:49 crc kubenswrapper[4753]: E1205 17:05:49.720705 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.743793 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.743863 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.743894 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.743926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.743951 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.847531 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.847600 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.847617 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.847648 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.847666 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.951306 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.951367 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.951388 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.951409 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:49 crc kubenswrapper[4753]: I1205 17:05:49.951423 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:49Z","lastTransitionTime":"2025-12-05T17:05:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.054654 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.054731 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.054756 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.054785 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.054807 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.159271 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.159361 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.159374 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.159399 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.159412 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.262279 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.262337 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.262350 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.262371 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.262389 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.366349 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.366404 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.366418 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.366437 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.366453 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.469441 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.469478 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.469490 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.469506 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.469519 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.573038 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.573089 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.573102 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.573124 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.573139 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.676847 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.676887 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.676899 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.676922 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.676937 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.720452 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.720499 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.720602 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:50 crc kubenswrapper[4753]: E1205 17:05:50.720655 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:50 crc kubenswrapper[4753]: E1205 17:05:50.720840 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:50 crc kubenswrapper[4753]: E1205 17:05:50.720913 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.781134 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.781366 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.781389 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.781415 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.781435 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.885238 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.885321 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.885345 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.885382 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.885405 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.988943 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.989037 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.989068 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.989105 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:50 crc kubenswrapper[4753]: I1205 17:05:50.989125 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:50Z","lastTransitionTime":"2025-12-05T17:05:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.092971 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.093034 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.093050 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.093074 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.093087 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:51Z","lastTransitionTime":"2025-12-05T17:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.196728 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.196810 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.196833 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.196866 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.196889 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:51Z","lastTransitionTime":"2025-12-05T17:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.300720 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.300885 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.300906 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.300934 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.300954 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:51Z","lastTransitionTime":"2025-12-05T17:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.405118 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.405221 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.405248 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.405285 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.405314 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:51Z","lastTransitionTime":"2025-12-05T17:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.509470 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.509584 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.509625 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.509661 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.509684 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:51Z","lastTransitionTime":"2025-12-05T17:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.613835 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.613905 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.613925 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.613949 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.613963 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:51Z","lastTransitionTime":"2025-12-05T17:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.717264 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.717316 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.717329 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.717351 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.717365 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:51Z","lastTransitionTime":"2025-12-05T17:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.719854 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:51 crc kubenswrapper[4753]: E1205 17:05:51.719987 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.740947 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.759836 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.772873 4753 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.809066 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.819619 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.819664 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.819678 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.819698 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.819709 4753 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:51Z","lastTransitionTime":"2025-12-05T17:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.841390 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.864532 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.879535 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.890722 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.904262 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.917521 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.921987 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.922020 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.922033 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.922052 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.922063 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:51Z","lastTransitionTime":"2025-12-05T17:05:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.928822 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.947353 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:45Z\\\",\\\"message\\\":\\\"y.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-2rg4s\\\\nI1205 17:05:45.473503 6840 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/package-server-manager-metrics]} name:Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:05:45.473553 6840 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-2rg4s in node crc\\\\nI1205 17:05:45.473547 6840 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mut\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.957880 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.969512 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.981704 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69636b03-af7a-49ce-b136-0ea61ea1bc56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8012c6347b457094531c3dfc5c7f18bec3f7cebb83d341ab1ed030443d6f4455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T
17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:51 crc kubenswrapper[4753]: I1205 17:05:51.994143 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e824
45b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.006718 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:52Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.024626 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.024658 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.024667 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.024681 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.024690 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.036066 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:52Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.128707 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.128769 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.128793 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.128824 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.128845 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.232429 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.232494 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.232509 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.232527 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.232541 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.336324 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.336460 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.336485 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.336521 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.336549 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.440129 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.440197 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.440206 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.440228 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.440264 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.544273 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.544326 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.544340 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.544354 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.544363 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.647436 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.647487 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.647499 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.647529 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.647539 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.719963 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.719975 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.720121 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:52 crc kubenswrapper[4753]: E1205 17:05:52.720251 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:52 crc kubenswrapper[4753]: E1205 17:05:52.720191 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:52 crc kubenswrapper[4753]: E1205 17:05:52.720468 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.750519 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.750616 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.750645 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.750681 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.750701 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.853206 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.853278 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.853296 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.853320 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.853340 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.956837 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.956907 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.956923 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.956944 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:52 crc kubenswrapper[4753]: I1205 17:05:52.956960 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:52Z","lastTransitionTime":"2025-12-05T17:05:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.060367 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.060426 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.060434 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.060453 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.060464 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.163125 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.163184 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.163195 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.163208 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.163218 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.265256 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.265293 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.265305 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.265321 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.265330 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.367133 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.367192 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.367203 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.367221 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.367232 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.469583 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.469667 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.469682 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.469700 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.469713 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.571457 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.571516 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.571542 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.571556 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.571565 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.674001 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.674047 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.674072 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.674099 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.674117 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.720418 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:53 crc kubenswrapper[4753]: E1205 17:05:53.720583 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.776019 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.776055 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.776063 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.776075 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.776085 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.878755 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.878799 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.878812 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.878831 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.878847 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.982078 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.982130 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.982142 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.982200 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:53 crc kubenswrapper[4753]: I1205 17:05:53.982224 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:53Z","lastTransitionTime":"2025-12-05T17:05:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.085688 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.085747 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.085762 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.085780 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.085791 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:54Z","lastTransitionTime":"2025-12-05T17:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.188323 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.188371 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.188381 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.188401 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.188413 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:54Z","lastTransitionTime":"2025-12-05T17:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.291883 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.291945 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.291957 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.291980 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.292498 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:54Z","lastTransitionTime":"2025-12-05T17:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.395402 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.395439 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.395471 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.395487 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.395496 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:54Z","lastTransitionTime":"2025-12-05T17:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.498504 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.498546 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.498556 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.498571 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.498582 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:54Z","lastTransitionTime":"2025-12-05T17:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.602397 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.602455 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.602472 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.602496 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.602515 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:54Z","lastTransitionTime":"2025-12-05T17:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.705514 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.705676 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.705703 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.705740 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.705768 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:54Z","lastTransitionTime":"2025-12-05T17:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.719989 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.720036 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.719988 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:54 crc kubenswrapper[4753]: E1205 17:05:54.720273 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:54 crc kubenswrapper[4753]: E1205 17:05:54.720415 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:54 crc kubenswrapper[4753]: E1205 17:05:54.720656 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.809931 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.809983 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.810000 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.810027 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.810043 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:54Z","lastTransitionTime":"2025-12-05T17:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.913861 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.913923 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.913942 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.913981 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:54 crc kubenswrapper[4753]: I1205 17:05:54.914023 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:54Z","lastTransitionTime":"2025-12-05T17:05:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.017668 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.017738 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.017758 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.017789 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.018012 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.120785 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.120862 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.120888 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.120926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.120952 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.224234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.224299 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.224319 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.224346 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.224365 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.328064 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.328116 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.328127 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.328162 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.328175 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.430951 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.430996 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.431005 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.431022 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.431030 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.534284 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.534344 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.534357 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.534379 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.534392 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.637720 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.637778 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.637796 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.637824 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.637841 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.720468 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:55 crc kubenswrapper[4753]: E1205 17:05:55.720694 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.739676 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.739734 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.739748 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.739763 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.739777 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.843653 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.843712 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.843725 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.843745 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.843760 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.947031 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.947101 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.947262 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.947436 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:55 crc kubenswrapper[4753]: I1205 17:05:55.947550 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:55Z","lastTransitionTime":"2025-12-05T17:05:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.050872 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.050934 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.050947 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.050968 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.050982 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:56Z","lastTransitionTime":"2025-12-05T17:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.153628 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.153708 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.153736 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.153769 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.153791 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:56Z","lastTransitionTime":"2025-12-05T17:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.256795 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.256850 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.256864 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.256884 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.256897 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:56Z","lastTransitionTime":"2025-12-05T17:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.360732 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.360814 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.360832 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.360860 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.360877 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:56Z","lastTransitionTime":"2025-12-05T17:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.491072 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.491110 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.491120 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.491136 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.491170 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:56Z","lastTransitionTime":"2025-12-05T17:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.593771 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.593815 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.593827 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.593844 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.593857 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:56Z","lastTransitionTime":"2025-12-05T17:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.696923 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.697285 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.697416 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.697563 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.697682 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:56Z","lastTransitionTime":"2025-12-05T17:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.720315 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.720436 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:05:56 crc kubenswrapper[4753]: E1205 17:05:56.720543 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:05:56 crc kubenswrapper[4753]: E1205 17:05:56.720656 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.720326 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:05:56 crc kubenswrapper[4753]: E1205 17:05:56.720862 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.806202 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.806303 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.806366 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.806444 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.806474 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:56Z","lastTransitionTime":"2025-12-05T17:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.910219 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.910298 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.910309 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.910324 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:56 crc kubenswrapper[4753]: I1205 17:05:56.910334 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:56Z","lastTransitionTime":"2025-12-05T17:05:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.013920 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.014501 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.014717 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.014953 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.015203 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.100398 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.101459 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.101621 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.101839 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.102083 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: E1205 17:05:57.126963 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.134682 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.134741 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.134763 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.134790 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.134811 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: E1205 17:05:57.157655 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.163691 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.163942 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.164013 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.164096 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.164197 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: E1205 17:05:57.186030 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.194084 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.194184 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.194206 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.194241 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.194261 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: E1205 17:05:57.219800 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.226370 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.226438 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.226457 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.226488 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.226507 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: E1205 17:05:57.249280 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:05:57Z is after 2025-08-24T17:21:41Z" Dec 05 17:05:57 crc kubenswrapper[4753]: E1205 17:05:57.249507 4753 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.252612 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.252864 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.253024 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.253208 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.253535 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.357812 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.357874 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.357893 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.357921 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.357962 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.462270 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.462339 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.462363 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.462403 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.462432 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.566957 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.567053 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.567084 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.567126 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.567236 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.671218 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.671275 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.671299 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.671335 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.671354 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.720483 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:05:57 crc kubenswrapper[4753]: E1205 17:05:57.720702 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.774102 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.774215 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.774237 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.774269 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.774291 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.877706 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.877805 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.877833 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.877874 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.877903 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.981267 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.981321 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.981331 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.981353 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:57 crc kubenswrapper[4753]: I1205 17:05:57.981366 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:57Z","lastTransitionTime":"2025-12-05T17:05:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.085260 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.085320 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.085337 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.085362 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.085379 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.188814 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.188881 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.188898 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.188920 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.188945 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.291785 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.291849 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.291863 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.291890 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.291902 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.395103 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.395137 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.395165 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.395179 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.395189 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.499099 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.499264 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.499292 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.499507 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.499529 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.499529 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.603312 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.603386 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.603407 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.603435 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.603453 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.707654 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.707718 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.707726 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.707743 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.707752 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.720203 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.720295 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.720305 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:05:58 crc kubenswrapper[4753]: E1205 17:05:58.720384 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:05:58 crc kubenswrapper[4753]: E1205 17:05:58.720837 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:05:58 crc kubenswrapper[4753]: E1205 17:05:58.720977 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.745764 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.810776 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.810847 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.810859 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.810936 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.810949 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.916203 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.916239 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.916252 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.916269 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:58 crc kubenswrapper[4753]: I1205 17:05:58.916281 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:58Z","lastTransitionTime":"2025-12-05T17:05:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.018180 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.018223 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.018234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.018250 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.018262 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.120400 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.120429 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.120437 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.120450 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.120459 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.222737 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.222787 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.222826 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.222845 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.222859 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.324862 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.324891 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.324899 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.324914 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.324924 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.427420 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.427463 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.427480 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.427495 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.427505 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.529614 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.529653 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.529665 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.529680 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.529692 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.632291 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.632334 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.632346 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.632363 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.632375 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.720406 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:05:59 crc kubenswrapper[4753]: E1205 17:05:59.720719 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.721180 4753 scope.go:117] "RemoveContainer" containerID="7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24"
Dec 05 17:05:59 crc kubenswrapper[4753]: E1205 17:05:59.721341 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.734869 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.734911 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.734922 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.734938 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.734951 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.837423 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.837458 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.837468 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.837483 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.837494 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.940710 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.940756 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.940766 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.940781 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:05:59 crc kubenswrapper[4753]: I1205 17:05:59.940790 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:05:59Z","lastTransitionTime":"2025-12-05T17:05:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.043185 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.043228 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.043238 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.043252 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.043265 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.145891 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.145923 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.145931 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.145944 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.145953 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.248723 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.248760 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.248772 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.248787 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.248798 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.350846 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.350884 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.350893 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.350906 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.350915 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.453327 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.453374 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.453392 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.453458 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.453477 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.556106 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.556426 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.556557 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.556682 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.556782 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.659202 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.659246 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.659257 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.659275 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.659287 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.719834 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:06:00 crc kubenswrapper[4753]: E1205 17:06:00.720280 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.719958 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:06:00 crc kubenswrapper[4753]: E1205 17:06:00.720573 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.719833 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:06:00 crc kubenswrapper[4753]: E1205 17:06:00.720832 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.762610 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.762666 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.762686 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.762709 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.762727 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.865889 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.865943 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.865957 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.865975 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.865988 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.969007 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.969048 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.969059 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.969075 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:00 crc kubenswrapper[4753]: I1205 17:06:00.969087 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:00Z","lastTransitionTime":"2025-12-05T17:06:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.072787 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.072866 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.072932 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.072958 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.072977 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.155584 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:06:01 crc kubenswrapper[4753]: E1205 17:06:01.156228 4753 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:06:01 crc kubenswrapper[4753]: E1205 17:06:01.156526 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs podName:00ab636b-9cc9-4a6f-8e6e-6442b35280ca nodeName:}" failed. No retries permitted until 2025-12-05 17:07:05.156493102 +0000 UTC m=+163.659600178 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs") pod "network-metrics-daemon-jjgfd" (UID: "00ab636b-9cc9-4a6f-8e6e-6442b35280ca") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.176242 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.176294 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.176307 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.176323 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
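The mount failure above is parked for exactly 1m4s before the next retry. 64s is what an exponential backoff reaches on the 8th consecutive failure if it starts at 500ms and doubles each time (0.5s x 2^7); the 500ms base and factor of 2 are assumptions inferred from the observed durationBeforeRetry, not taken from kubelet source. A sketch of that arithmetic:

    package main

    import (
    	"fmt"
    	"time"
    )

    // durationBeforeRetry computes the wait after `failures` consecutive
    // errors, assuming a 500ms initial delay doubled per failure (values
    // inferred from the 1m4s seen in the log: 500ms * 2^7 = 64s).
    func durationBeforeRetry(failures int) time.Duration {
    	d := 500 * time.Millisecond
    	for i := 1; i < failures; i++ {
    		d *= 2
    	}
    	return d
    }

    func main() {
    	fmt.Println(durationBeforeRetry(8)) // prints 1m4s
    }

Under that assumption, the volume manager has already failed this mount seven times, which matches a secret that has never become available to the kubelet's informer ("not registered").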
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.176334 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.279036 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.279075 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.279083 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.279098 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.279107 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.381553 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.381598 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.381609 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.381629 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.381644 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.485113 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.485194 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.485206 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.485232 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.485251 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.588318 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.588404 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.588427 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.588497 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.588519 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.692199 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.692264 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.692282 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.692310 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.692327 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.720020 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.746032 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.766390 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.787743 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.794922 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.795216 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.795432 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.795598 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.795768 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.795768 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.810742 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.834480 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"65284411-042e-46b9-8e55-cabc9e78a397\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1e83b48779d4b8b709f9bd7351180040315002f738ada5ab034c883f87ef8734\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e7886d72b05768ec1d515cfb2837f52ae16e82445b72a3016762308b80892d55\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3a662a8ca2ca9bcf02ace463bf26e44293b9776c69f3f1f84110392c3cab6e83\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z"
Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.859282 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fb6627d3-0817-4737-b5c7-00b2abc67b0f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bba91e82256742fa5b98e2e481b8ab267ce89d71e180581f34fa371d03bbc1c5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf90148205c7fde7c73b3ed1130fa5c9fc4b6562c0e1f4f6b009a72677f32f22\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://95027e80190e6caa0524ede7ef31c38c8861aeca94936b1a7a819fdc650c7969\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1bd3444395954ccae8d5c8137c6ebd7f62a6c864cea89f98a137af3e26cda8f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.890184 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"751d4d21-4eb8-4236-bca2-d81f094ff2f8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1bda355ba818e87604cf7cfba53f0ee3116f2d4234e7d9631ec7e6571ed34030\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aa8d4e5672a07ea28ddba02eaaf603750cd3067ff47165776d2dc7f2ad42a290\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://87e685f0d2c857d143c3ee4b13adf2afe6796c40ae87e2a304b78783907a785a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://842a75d6b100069785dbaf413df3b610a5df28c9ec124100eb31c965985282fa\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f4ac00f8ef346db3de91e7bec01a56eac9f27d5a501d6f686173ece4897b9694\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3585437cbe0f17e5bf92a74123f0e6f13475b6e5d49b145f2b9b869559b53310\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e60aa96f2377e63e44e932efe51f79832b778b8ef41fd47e728a80286956c62\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5sdf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-2rg4s\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.899271 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.899327 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:01 crc 
kubenswrapper[4753]: I1205 17:06:01.899341 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.899364 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.899379 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:01Z","lastTransitionTime":"2025-12-05T17:06:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.925862 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7a20782d19103ad754e8771413e6ca949a706a68
76c2e949437969541c112d24\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:45Z\\\",\\\"message\\\":\\\"y.go:365] Adding new object: *v1.Pod openshift-multus/multus-additional-cni-plugins-2rg4s\\\\nI1205 17:05:45.473503 6840 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/package-server-manager-metrics]} name:Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:05:45.473553 6840 ovn.go:134] Ensuring zone local for Pod openshift-multus/multus-additional-cni-plugins-2rg4s in node crc\\\\nI1205 17:05:45.473547 6840 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mut\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:05:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2fgl2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-98fvv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.947110 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-f6qn6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"693c26bb-e75f-4f7f-bd2a-bdf7dcb0af06\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://02ea0085a961c324f4a426d68972043f4b24b3aa03aed623ebc46bbafc857d1a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-g4w6q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:45Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-f6qn6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.968376 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"338d3da9-209c-4ca9-a37d-6ea5731d1622\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7747e4738055a7b994119bad9b30d47e0b510c7407c53df29b674c553dbbcbb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://097885ac231f081a81d51cf0091df6f93ca49068340438454db429194e5475b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qh6zg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:55Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-bhvk4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:01 crc kubenswrapper[4753]: I1205 17:06:01.987374 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"69636b03-af7a-49ce-b136-0ea61ea1bc56\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8012c6347b457094531c3dfc5c7f18bec3f7cebb83d341ab1ed030443d6f4455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5e42b55a106572b6b023a9d60bea634c6a6d7600989d23ba444a6366fd4980a9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T
17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.003574 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.003661 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.003681 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.003713 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.003734 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.025859 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6d24fc35-49ed-4e87-8551-3dd6e3a69989\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00ad2b7bc9127cb48d5c66fd08bb103b640668ba8e2f0232e96c7ff35fc81d02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc865793a4e57c985c048cbe1039b14a5d741740398b0668a3357aad17bdc7ee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6ab17dafe5558c990006acd7086b246ab4de08cecae110ea0c45c715319105c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2aad9ac38a4559b5429c6e6084e4ac76d82dedf05857d6dda0213c85daade35e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d57e4d7cbce488283393f816c9b71f654bf789b6900640a06bd6318b62ed3c51\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://498f195e044debe4363789dc7c265123a594ed15546a33aff7aeca45163d4a08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://498f195e044debe4363789dc7c265123a594ed15546a33aff7aeca45163d4a08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57f8f3748e2769dfee756be6f04df3aaaa6e9ea410631aaec4c2b0fb36380aba\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://57f8f3748e2769dfee756be6f04df3aaaa6e9ea410631aaec4c2b0fb36380aba\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8d0e87a821df2df8d12a9d1ebfc28918e04955cfddb52ecc5fdf17595715ed05\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8d0e87a821df2df8d12a9d1ebfc28918e04955cfddb52ecc5fdf17595715ed05\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.043556 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bhlg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-jjgfd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.060250 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://408690fbd818b50273c914cc463acec30ff77bbf7d26680bffa2451a1d1ddcf7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.078267 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://78c3b80f4d08dc9fd4215fc87bbd7331f765f301fa587f5fd4ded54cd8db6587\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vzfkz\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-khn68\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.097973 4753 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.107274 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.107314 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.107327 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.107351 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.107365 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.123605 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.145518 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.169048 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:02Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.211057 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.211139 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.211182 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.211265 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.211284 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.313592 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.313633 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.313645 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.313665 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.313679 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.416741 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.416790 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.416799 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.416813 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.416822 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.520686 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.520741 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.520751 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.520767 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.520781 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.624526 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.624596 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.624614 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.624642 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.624663 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.719841 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.719882 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.719961 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:02 crc kubenswrapper[4753]: E1205 17:06:02.720062 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:02 crc kubenswrapper[4753]: E1205 17:06:02.720256 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:02 crc kubenswrapper[4753]: E1205 17:06:02.720383 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.728227 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.728273 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.728287 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.728308 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.728324 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.830494 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.830563 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.830582 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.830610 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.830630 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.934512 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.934581 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.934600 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.934626 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:02 crc kubenswrapper[4753]: I1205 17:06:02.934646 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:02Z","lastTransitionTime":"2025-12-05T17:06:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.038350 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.038846 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.039039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.039363 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.039639 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.143851 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.144453 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.144632 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.144769 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.144889 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.250036 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.250117 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.250142 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.250207 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.250237 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.353363 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.353403 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.353419 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.353439 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.353453 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.456852 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.457359 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.457534 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.457674 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.457843 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.561322 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.561382 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.561397 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.561419 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.561434 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.663664 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.663719 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.663732 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.663749 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.663759 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.719575 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:03 crc kubenswrapper[4753]: E1205 17:06:03.719735 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.766714 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.766762 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.766773 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.766789 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.766802 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.870075 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.870184 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.870209 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.870244 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.870270 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.973625 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.973698 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.973735 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.973771 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:03 crc kubenswrapper[4753]: I1205 17:06:03.973795 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:03Z","lastTransitionTime":"2025-12-05T17:06:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.076503 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.076549 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.076561 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.076575 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.076584 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:04Z","lastTransitionTime":"2025-12-05T17:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.179541 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.180044 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.180168 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.180269 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.181225 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:04Z","lastTransitionTime":"2025-12-05T17:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.285465 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.286033 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.286248 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.286495 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.286758 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:04Z","lastTransitionTime":"2025-12-05T17:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.388781 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.389340 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.389369 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.389387 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.389401 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:04Z","lastTransitionTime":"2025-12-05T17:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.491937 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.492028 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.492070 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.492110 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.492134 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:04Z","lastTransitionTime":"2025-12-05T17:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.595794 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.595860 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.595879 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.595904 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.595921 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:04Z","lastTransitionTime":"2025-12-05T17:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.699253 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.699338 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.699366 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.699404 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.699428 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:04Z","lastTransitionTime":"2025-12-05T17:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.719529 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.719529 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:06:04 crc kubenswrapper[4753]: E1205 17:06:04.719769 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:06:04 crc kubenswrapper[4753]: E1205 17:06:04.719908 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.720220 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:06:04 crc kubenswrapper[4753]: E1205 17:06:04.720569 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.802583 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.802912 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.802994 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.803134 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.803246 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:04Z","lastTransitionTime":"2025-12-05T17:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.905768 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.905862 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.905880 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.905907 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:04 crc kubenswrapper[4753]: I1205 17:06:04.905923 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:04Z","lastTransitionTime":"2025-12-05T17:06:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.008916 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.008988 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.009008 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.009039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.009061 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.112384 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.112442 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.112454 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.112473 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.112486 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.216182 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.216251 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.216262 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.216281 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.216294 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.320210 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.320284 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.320305 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.320335 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.320358 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.424367 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.424458 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.424478 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.424511 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.424533 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.528513 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.528579 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.528591 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.528617 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.528636 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.632748 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.632811 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.632826 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.632847 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.632865 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.720344 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:06:05 crc kubenswrapper[4753]: E1205 17:06:05.720580 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.735234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.735275 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.735285 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.735304 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.735318 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.838487 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.838560 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.838573 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.838588 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.838598 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.941281 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.941408 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.941444 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.941484 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:05 crc kubenswrapper[4753]: I1205 17:06:05.941521 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:05Z","lastTransitionTime":"2025-12-05T17:06:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.044336 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.044392 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.044403 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.044475 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.044488 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.148078 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.148199 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.148238 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.148273 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.148296 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.252844 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.252917 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.252935 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.252964 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.252984 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.356673 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.356818 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.356842 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.356878 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.356900 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.460431 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.460493 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.460516 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.460546 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.460569 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.564324 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.564389 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.564404 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.564426 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.564442 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.667978 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.668049 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.668067 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.668097 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.668118 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.719843 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:06:06 crc kubenswrapper[4753]: E1205 17:06:06.720006 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.720025 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.720081 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:06:06 crc kubenswrapper[4753]: E1205 17:06:06.720553 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:06:06 crc kubenswrapper[4753]: E1205 17:06:06.720675 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.771883 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.771967 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.771988 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.772021 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.772045 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.875873 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.875942 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.875960 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.875987 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.876006 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.979924 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.980073 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.980096 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.980133 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:06 crc kubenswrapper[4753]: I1205 17:06:06.980200 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:06Z","lastTransitionTime":"2025-12-05T17:06:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.083810 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.083872 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.083891 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.083918 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.083936 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.186770 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.186858 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.186879 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.186902 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.186914 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.290087 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.290171 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.290183 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.290202 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.290213 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.393018 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.393053 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.393061 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.393074 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.393084 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.456078 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.456172 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.456192 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.456220 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.456241 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:07 crc kubenswrapper[4753]: E1205 17:06:07.480629 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:07Z is after 2025-08-24T17:21:41Z"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.485624 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.485689 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
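Note that this failure is distinct from the CNI one: the status patch reaches the API server but is rejected by the node.network-node-identity.openshift.io admission webhook, whose serving certificate at 127.0.0.1:9743 expired on 2025-08-24 while the node clock reads 2025-12-05, as happens when a CRC VM is resumed long after its certificates were minted. A sketch to confirm the expiry from the node (the endpoint is taken from the log; the openssl pipeline is standard tooling and assumes shell access to the node):

  # Dump the validity window of the webhook's serving certificate
  openssl s_client -connect 127.0.0.1:9743 </dev/null 2>/dev/null | openssl x509 -noout -dates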
event="NodeHasNoDiskPressure" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.485700 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.485719 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.485732 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:07 crc kubenswrapper[4753]: E1205 17:06:07.504661 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.510380 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.510445 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.510465 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.510498 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.510519 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:07 crc kubenswrapper[4753]: E1205 17:06:07.531700 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.537119 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.537232 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.537254 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.537288 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.537314 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:07 crc kubenswrapper[4753]: E1205 17:06:07.559881 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.564891 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.564977 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
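The repeated x509 failures above all point at one root cause: the serving certificate of the "node.network-node-identity.openshift.io" webhook on https://127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, so the API server rejects every node-status PATCH when it calls the webhook. A minimal sketch for confirming the validity window from the node follows; it assumes Python with the third-party "cryptography" package (version 42+, for the *_utc accessors), and the address is taken directly from the log line.

    # Fetch the certificate presented on 127.0.0.1:9743 and print its
    # validity window. ssl.get_server_certificate() skips chain
    # verification, so it works even though the cert is already expired.
    import ssl
    from cryptography import x509

    pem = ssl.get_server_certificate(("127.0.0.1", 9743))
    cert = x509.load_pem_x509_certificate(pem.encode())
    print("notBefore:", cert.not_valid_before_utc)
    print("notAfter: ", cert.not_valid_after_utc)  # per the log: 2025-08-24T17:21:41Z
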
event="NodeHasNoDiskPressure" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.564991 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.565015 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.565029 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:07 crc kubenswrapper[4753]: E1205 17:06:07.584985 4753 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:06:07Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"6cf3541b-b15a-4035-99a1-dd4a7b86e07c\\\",\\\"systemUUID\\\":\\\"6ae126b1-60b3-4aa4-8711-3da4c1b89426\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:07 crc kubenswrapper[4753]: E1205 17:06:07.585116 4753 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.587086 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
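"update node status exceeds retry count" marks the end of one sync attempt: upstream kubelet retries the status patch a fixed number of times per sync (nodeStatusUpdateRetry, 5 in the upstream source) before giving up until the next nodeStatusUpdateFrequency tick, which is why the closely spaced failures above are followed by a fresh burst of "Recording event message" lines. Independently of the webhook problem, the Ready condition itself is False because the runtime finds no CNI configuration. A rough stand-in for that check, assuming Python on the node and using the directory named in the log (libcni accepts .conf, .conflist, and legacy .json files):

    # List CNI network configs roughly the way the runtime's CNI plugin
    # manager would: any *.conf/*.conflist/*.json file in the conf dir counts.
    import pathlib

    conf_dir = pathlib.Path("/etc/kubernetes/cni/net.d")
    configs = sorted(p for p in conf_dir.glob("*")
                     if p.suffix in {".conf", ".conflist", ".json"})
    if not configs:
        print(f"no CNI configuration file in {conf_dir}/ -- node stays NotReady")
    else:
        for p in configs:
            print("found CNI config:", p)
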
event="NodeHasSufficientMemory" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.587134 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.587170 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.587191 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.587204 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.690842 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.690917 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.690930 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.690946 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.690956 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.720130 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:07 crc kubenswrapper[4753]: E1205 17:06:07.720312 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.794434 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.794532 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.794640 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.794673 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.794693 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.898023 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.898084 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.898094 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.898113 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:07 crc kubenswrapper[4753]: I1205 17:06:07.898127 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:07Z","lastTransitionTime":"2025-12-05T17:06:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.002326 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.002374 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.002382 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.002398 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.002410 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.106139 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.106250 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.106268 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.106294 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.106313 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.208356 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.208420 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.208441 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.208466 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.208482 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.312121 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.312196 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.312209 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.312230 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.312269 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.414715 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.414789 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.414806 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.414829 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.414846 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.518514 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.518579 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.518591 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.518612 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.518629 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.622658 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.622723 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.622735 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.622754 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.622773 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.720091 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.720175 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.720091 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:06:08 crc kubenswrapper[4753]: E1205 17:06:08.720387 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:06:08 crc kubenswrapper[4753]: E1205 17:06:08.720404 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:06:08 crc kubenswrapper[4753]: E1205 17:06:08.720455 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.725091 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.725175 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.725199 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.725219 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.725232 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.827900 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.827974 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.827986 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.828001 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.828013 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.933667 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.933776 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.933808 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.933869 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:08 crc kubenswrapper[4753]: I1205 17:06:08.933901 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:08Z","lastTransitionTime":"2025-12-05T17:06:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.036500 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.036544 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.036554 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.036567 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.036577 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.138969 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.139007 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.139016 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.139032 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.139041 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.241144 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.241205 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.241216 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.241232 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.241242 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.343422 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.343456 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.343467 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.343483 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.343495 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.446615 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.446693 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.446723 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.446753 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.446777 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.549794 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.549830 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.549840 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.549857 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.549868 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.652415 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.652460 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.652472 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.652490 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.652502 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.720423 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:06:09 crc kubenswrapper[4753]: E1205 17:06:09.720595 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.754186 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.754225 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.754237 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.754251 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.754261 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.856039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.856078 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.856088 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.856103 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.856113 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.958500 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.958561 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.958582 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.958611 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:09 crc kubenswrapper[4753]: I1205 17:06:09.958631 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:09Z","lastTransitionTime":"2025-12-05T17:06:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.061203 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.061240 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.061254 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.061269 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.061280 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.163806 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.163838 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.163847 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.163859 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.163868 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.265991 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.266042 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.266053 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.266069 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.266079 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.368977 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.369019 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.369028 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.369045 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.369056 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.471310 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.471346 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.471355 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.471370 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.471380 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.573167 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.573199 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.573207 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.573221 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.573231 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.676247 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.676296 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.676309 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.676334 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.676378 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.719850 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.719893 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:06:10 crc kubenswrapper[4753]: E1205 17:06:10.719995 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.720081 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd"
Dec 05 17:06:10 crc kubenswrapper[4753]: E1205 17:06:10.720205 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:06:10 crc kubenswrapper[4753]: E1205 17:06:10.720452 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.779133 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.779196 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.779206 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.779227 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.779237 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.882534 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.882597 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.882607 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.882621 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.882634 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.985299 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.985355 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.985366 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.985381 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:10 crc kubenswrapper[4753]: I1205 17:06:10.985394 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:10Z","lastTransitionTime":"2025-12-05T17:06:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.088608 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.088633 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.088641 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.088653 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.088663 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:11Z","lastTransitionTime":"2025-12-05T17:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.191207 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.191259 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.191279 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.191307 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.191327 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:11Z","lastTransitionTime":"2025-12-05T17:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.294926 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.294988 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.295006 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.295034 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.295053 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:11Z","lastTransitionTime":"2025-12-05T17:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.398072 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.398127 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.398177 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.398226 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.398243 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:11Z","lastTransitionTime":"2025-12-05T17:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.501840 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.501901 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.501919 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.501943 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.501959 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:11Z","lastTransitionTime":"2025-12-05T17:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.604048 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.604089 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.604098 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.604112 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.604124 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:11Z","lastTransitionTime":"2025-12-05T17:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.706250 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.706297 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.706309 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.706325 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.706573 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:11Z","lastTransitionTime":"2025-12-05T17:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.719903 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:06:11 crc kubenswrapper[4753]: E1205 17:06:11.720218 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.742785 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-hpl8r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6b3d3501-4f16-4375-adf2-fd54b1cd13cf\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:05:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:05:30Z\\\",\\\"message\\\":\\\"2025-12-05T17:04:44+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a\\\\n2025-12-05T17:04:44+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2d7d6f8b-5939-404d-a339-48cbe066dc8a to /host/opt/cni/bin/\\\\n2025-12-05T17:04:44Z [verbose] multus-daemon started\\\\n2025-12-05T17:04:44Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:05:29Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:05:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dmzrw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:43Z\\\"}}\" for pod \"openshift-multus\"/\"multus-hpl8r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.772616 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ee37fdbe-64d5-4bb1-8522-932d83e0648e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 17:04:35.064086 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 17:04:35.066802 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1773129596/tls.crt::/tmp/serving-cert-1773129596/tls.key\\\\\\\"\\\\nI1205 17:04:40.565607 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 17:04:40.585450 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 17:04:40.585477 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 17:04:40.585506 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 17:04:40.585513 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:04:40.600592 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:04:40.600617 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600622 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:04:40.600626 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:04:40.600629 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:04:40.600632 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:04:40.600635 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:04:40.600704 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 17:04:40.602558 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:24Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:04:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:04:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.795381 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8934d1ec7490b421708e4678d69a55cdd381563ada3d945d066fcf4abba49f3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e799acff1e50a664e95e30896e435bc77a35c8560044c5f180e16c3534bcf828\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.811247 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.811293 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.811302 4753 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.811527 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.811545 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:11Z","lastTransitionTime":"2025-12-05T17:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.812721 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c52dbe30298514a572fd7cf4a301357ef0290af5894be7f9d08aff6c3fbe85a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.831702 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vj5f7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7f38be29-f040-4e7d-9026-36929c0c5cda\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:42Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b5d403644e00b367518058ecf22ed9913df9a1353e2fede8802deaa288a48e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:04:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vc2cd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:04:42Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vj5f7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.851691 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.867127 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.882095 4753 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:04:40Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:06:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.901867 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=61.901846259 podStartE2EDuration="1m1.901846259s" podCreationTimestamp="2025-12-05 17:05:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:11.90119013 +0000 UTC m=+110.404297176" watchObservedRunningTime="2025-12-05 17:06:11.901846259 +0000 UTC m=+110.404953265" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.914914 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.914962 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.914974 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.914996 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.915009 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:11Z","lastTransitionTime":"2025-12-05T17:06:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.928347 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-2rg4s" podStartSLOduration=89.928314944 podStartE2EDuration="1m29.928314944s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:11.92229231 +0000 UTC m=+110.425399316" watchObservedRunningTime="2025-12-05 17:06:11.928314944 +0000 UTC m=+110.431421970" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.966780 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-f6qn6" podStartSLOduration=89.966730535 podStartE2EDuration="1m29.966730535s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:11.966524979 +0000 UTC m=+110.469632005" watchObservedRunningTime="2025-12-05 17:06:11.966730535 +0000 UTC m=+110.469837561" Dec 05 17:06:11 crc kubenswrapper[4753]: I1205 17:06:11.982452 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-bhvk4" podStartSLOduration=88.982414218 podStartE2EDuration="1m28.982414218s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:11.982388567 +0000 UTC m=+110.485495593" watchObservedRunningTime="2025-12-05 17:06:11.982414218 +0000 UTC m=+110.485521234" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.007763 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=30.00771192 podStartE2EDuration="30.00771192s" podCreationTimestamp="2025-12-05 17:05:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:12.006928507 +0000 UTC m=+110.510035543" watchObservedRunningTime="2025-12-05 17:06:12.00771192 +0000 UTC m=+110.510818946" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.018870 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.018924 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.018942 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.018966 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.018981 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.043822 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=14.043783073 podStartE2EDuration="14.043783073s" podCreationTimestamp="2025-12-05 17:05:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:12.038871961 +0000 UTC m=+110.541978997" watchObservedRunningTime="2025-12-05 17:06:12.043783073 +0000 UTC m=+110.546890079" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.059076 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=91.059058464 podStartE2EDuration="1m31.059058464s" podCreationTimestamp="2025-12-05 17:04:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:12.058244001 +0000 UTC m=+110.561351007" watchObservedRunningTime="2025-12-05 17:06:12.059058464 +0000 UTC m=+110.562165470" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.098449 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podStartSLOduration=90.098432563 podStartE2EDuration="1m30.098432563s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:12.085386765 +0000 UTC m=+110.588493771" watchObservedRunningTime="2025-12-05 17:06:12.098432563 +0000 UTC m=+110.601539569" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.121867 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.121929 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.121943 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.121964 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.121977 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.224973 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.225028 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.225041 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.225060 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.225072 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.329013 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.329060 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.329072 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.329089 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.329098 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.432487 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.432609 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.432629 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.432666 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.432690 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.536369 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.536476 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.536509 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.536534 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.536549 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.641009 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.641091 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.641105 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.641131 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.641167 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.719530 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:12 crc kubenswrapper[4753]: E1205 17:06:12.719812 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.719924 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.720059 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:12 crc kubenswrapper[4753]: E1205 17:06:12.720169 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:12 crc kubenswrapper[4753]: E1205 17:06:12.720869 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.745601 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.745666 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.745689 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.745719 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.745741 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.848604 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.848691 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.848710 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.848734 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.848747 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.952370 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.952439 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.952459 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.952488 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:12 crc kubenswrapper[4753]: I1205 17:06:12.952511 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:12Z","lastTransitionTime":"2025-12-05T17:06:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.055397 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.055469 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.055489 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.055517 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.055536 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.159268 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.159336 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.159351 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.159379 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.159398 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.262742 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.262782 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.262797 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.262821 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.262837 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.365609 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.365648 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.365660 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.365677 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.365688 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.468492 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.468558 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.468568 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.468580 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.468594 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.571544 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.571632 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.571656 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.571688 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.571708 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.675526 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.675582 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.675592 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.675609 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.675623 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.719786 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:13 crc kubenswrapper[4753]: E1205 17:06:13.720036 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.721383 4753 scope.go:117] "RemoveContainer" containerID="7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24" Dec 05 17:06:13 crc kubenswrapper[4753]: E1205 17:06:13.721601 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-98fvv_openshift-ovn-kubernetes(f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a)\"" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.778987 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.779058 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.779076 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.779102 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.779117 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.882556 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.882606 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.882617 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.882635 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.882649 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.985782 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.985828 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.985839 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.985859 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:13 crc kubenswrapper[4753]: I1205 17:06:13.985872 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:13Z","lastTransitionTime":"2025-12-05T17:06:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.090127 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.090213 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.090234 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.090264 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.090288 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:14Z","lastTransitionTime":"2025-12-05T17:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.193837 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.193929 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.193948 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.193981 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.194001 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:14Z","lastTransitionTime":"2025-12-05T17:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.298023 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.298090 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.298109 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.298136 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.298192 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:14Z","lastTransitionTime":"2025-12-05T17:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.401392 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.401445 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.401459 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.401478 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.401492 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:14Z","lastTransitionTime":"2025-12-05T17:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.505133 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.505256 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.505280 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.505322 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.505353 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:14Z","lastTransitionTime":"2025-12-05T17:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.609480 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.609556 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.609575 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.609605 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.609628 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:14Z","lastTransitionTime":"2025-12-05T17:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.712712 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.712784 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.712802 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.712831 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.712855 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:14Z","lastTransitionTime":"2025-12-05T17:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.719575 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.719604 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.719686 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:14 crc kubenswrapper[4753]: E1205 17:06:14.719736 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:14 crc kubenswrapper[4753]: E1205 17:06:14.719912 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:14 crc kubenswrapper[4753]: E1205 17:06:14.720061 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.816480 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.816577 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.816593 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.816617 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.816637 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:14Z","lastTransitionTime":"2025-12-05T17:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.920086 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.920243 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.920274 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.920319 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:14 crc kubenswrapper[4753]: I1205 17:06:14.920343 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:14Z","lastTransitionTime":"2025-12-05T17:06:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.031122 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.031265 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.031290 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.031327 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.031353 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.134899 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.134985 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.135005 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.135039 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.135063 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.239233 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.239368 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.239388 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.239416 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.239436 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.343080 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.343186 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.343212 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.343247 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.343276 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.447002 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.447093 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.447113 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.447142 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.447218 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.551360 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.551435 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.551457 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.551492 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.551513 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.655080 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.655197 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.655215 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.655238 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.655255 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.719981 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:15 crc kubenswrapper[4753]: E1205 17:06:15.720949 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.758823 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.758902 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.758923 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.758954 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.758976 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.861760 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.861833 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.861855 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.861885 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.861906 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.885177 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/1.log" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.885972 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/0.log" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.886060 4753 generic.go:334] "Generic (PLEG): container finished" podID="6b3d3501-4f16-4375-adf2-fd54b1cd13cf" containerID="1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e" exitCode=1 Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.886138 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpl8r" event={"ID":"6b3d3501-4f16-4375-adf2-fd54b1cd13cf","Type":"ContainerDied","Data":"1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.886292 4753 scope.go:117] "RemoveContainer" containerID="8b2cc49130697a15123f202462d2f0781903baea370d4906d22d1b39c23320d7" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.888315 4753 scope.go:117] "RemoveContainer" containerID="1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e" Dec 05 17:06:15 crc kubenswrapper[4753]: E1205 17:06:15.888747 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-hpl8r_openshift-multus(6b3d3501-4f16-4375-adf2-fd54b1cd13cf)\"" pod="openshift-multus/multus-hpl8r" podUID="6b3d3501-4f16-4375-adf2-fd54b1cd13cf" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.969349 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.969417 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.969435 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.969518 
4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.969552 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:15Z","lastTransitionTime":"2025-12-05T17:06:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:15 crc kubenswrapper[4753]: I1205 17:06:15.979887 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-vj5f7" podStartSLOduration=93.979848935 podStartE2EDuration="1m33.979848935s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:15.978775284 +0000 UTC m=+114.481882360" watchObservedRunningTime="2025-12-05 17:06:15.979848935 +0000 UTC m=+114.482955981" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.008854 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=96.008817053 podStartE2EDuration="1m36.008817053s" podCreationTimestamp="2025-12-05 17:04:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:16.008688149 +0000 UTC m=+114.511795255" watchObservedRunningTime="2025-12-05 17:06:16.008817053 +0000 UTC m=+114.511924089" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.074730 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.074818 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.074839 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.074872 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.074894 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:16Z","lastTransitionTime":"2025-12-05T17:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.178131 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.178251 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.178267 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.178295 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.178320 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:16Z","lastTransitionTime":"2025-12-05T17:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.281743 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.281805 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.281815 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.281833 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.281847 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:16Z","lastTransitionTime":"2025-12-05T17:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.384991 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.385069 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.385091 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.385118 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.385140 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:16Z","lastTransitionTime":"2025-12-05T17:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.489381 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.489442 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.489457 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.489487 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.489501 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:16Z","lastTransitionTime":"2025-12-05T17:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.593178 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.593231 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.593245 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.593268 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.593283 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:16Z","lastTransitionTime":"2025-12-05T17:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.697463 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.697552 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.697575 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.697614 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.697639 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:16Z","lastTransitionTime":"2025-12-05T17:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.719669 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.719775 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.719886 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:16 crc kubenswrapper[4753]: E1205 17:06:16.720041 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:16 crc kubenswrapper[4753]: E1205 17:06:16.720279 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:16 crc kubenswrapper[4753]: E1205 17:06:16.720482 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.802297 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.802373 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.802394 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.802427 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.802446 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:16Z","lastTransitionTime":"2025-12-05T17:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.892801 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/1.log" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.906825 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.906917 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.906936 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.906961 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:16 crc kubenswrapper[4753]: I1205 17:06:16.906987 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:16Z","lastTransitionTime":"2025-12-05T17:06:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.010275 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.010339 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.010357 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.010385 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.010402 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:17Z","lastTransitionTime":"2025-12-05T17:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.113498 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.113546 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.113561 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.113609 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.113622 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:17Z","lastTransitionTime":"2025-12-05T17:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.216828 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.216888 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.216908 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.216931 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.216949 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:17Z","lastTransitionTime":"2025-12-05T17:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.319949 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.320003 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.320032 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.320047 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.320055 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:17Z","lastTransitionTime":"2025-12-05T17:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.423415 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.423974 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.424202 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.424412 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.424577 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:17Z","lastTransitionTime":"2025-12-05T17:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.529084 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.529535 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.529654 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.529811 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.529939 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:17Z","lastTransitionTime":"2025-12-05T17:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.633692 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.633988 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.634056 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.634134 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.634217 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:17Z","lastTransitionTime":"2025-12-05T17:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.692685 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.692717 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.692726 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.692742 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.692753 4753 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:06:17Z","lastTransitionTime":"2025-12-05T17:06:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.719877 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:17 crc kubenswrapper[4753]: E1205 17:06:17.720132 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.753409 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf"] Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.754393 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.759014 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.759434 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.760298 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.760530 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.857782 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/b06d9253-daf6-436c-b58f-18c12424205d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.857836 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b06d9253-daf6-436c-b58f-18c12424205d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.857887 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/b06d9253-daf6-436c-b58f-18c12424205d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.857908 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b06d9253-daf6-436c-b58f-18c12424205d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.857962 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b06d9253-daf6-436c-b58f-18c12424205d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.958658 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b06d9253-daf6-436c-b58f-18c12424205d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc 
kubenswrapper[4753]: I1205 17:06:17.958711 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/b06d9253-daf6-436c-b58f-18c12424205d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.958734 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b06d9253-daf6-436c-b58f-18c12424205d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.958773 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/b06d9253-daf6-436c-b58f-18c12424205d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.958795 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b06d9253-daf6-436c-b58f-18c12424205d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.959431 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/b06d9253-daf6-436c-b58f-18c12424205d-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.959572 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/b06d9253-daf6-436c-b58f-18c12424205d-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.959620 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b06d9253-daf6-436c-b58f-18c12424205d-service-ca\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.967498 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b06d9253-daf6-436c-b58f-18c12424205d-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:17 crc kubenswrapper[4753]: I1205 17:06:17.980929 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b06d9253-daf6-436c-b58f-18c12424205d-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-rvlnf\" (UID: \"b06d9253-daf6-436c-b58f-18c12424205d\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:18 crc kubenswrapper[4753]: I1205 17:06:18.080846 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" Dec 05 17:06:18 crc kubenswrapper[4753]: I1205 17:06:18.720288 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:18 crc kubenswrapper[4753]: I1205 17:06:18.720312 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:18 crc kubenswrapper[4753]: E1205 17:06:18.720504 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:18 crc kubenswrapper[4753]: I1205 17:06:18.720330 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:18 crc kubenswrapper[4753]: E1205 17:06:18.720776 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:18 crc kubenswrapper[4753]: E1205 17:06:18.720955 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:18 crc kubenswrapper[4753]: I1205 17:06:18.903224 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" event={"ID":"b06d9253-daf6-436c-b58f-18c12424205d","Type":"ContainerStarted","Data":"effeb41cd273a509e69fb9f2b8cdad81a67fd4c054752863450bf995b3c1d366"} Dec 05 17:06:18 crc kubenswrapper[4753]: I1205 17:06:18.903335 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" event={"ID":"b06d9253-daf6-436c-b58f-18c12424205d","Type":"ContainerStarted","Data":"ac5259ebad75d383122327e637c1ccc42f4440f6b70bd009e689a2157ae06bb7"} Dec 05 17:06:18 crc kubenswrapper[4753]: I1205 17:06:18.927124 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-rvlnf" podStartSLOduration=96.927079826 podStartE2EDuration="1m36.927079826s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:18.924218624 +0000 UTC m=+117.427325730" watchObservedRunningTime="2025-12-05 17:06:18.927079826 +0000 UTC m=+117.430186882" Dec 05 17:06:19 crc kubenswrapper[4753]: I1205 17:06:19.720570 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:19 crc kubenswrapper[4753]: E1205 17:06:19.720837 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:20 crc kubenswrapper[4753]: I1205 17:06:20.720052 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:20 crc kubenswrapper[4753]: E1205 17:06:20.720211 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:20 crc kubenswrapper[4753]: I1205 17:06:20.720071 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:20 crc kubenswrapper[4753]: E1205 17:06:20.720273 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:20 crc kubenswrapper[4753]: I1205 17:06:20.720072 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:20 crc kubenswrapper[4753]: E1205 17:06:20.720355 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:21 crc kubenswrapper[4753]: I1205 17:06:21.720321 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:21 crc kubenswrapper[4753]: E1205 17:06:21.725385 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:21 crc kubenswrapper[4753]: E1205 17:06:21.771531 4753 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 17:06:21 crc kubenswrapper[4753]: E1205 17:06:21.850107 4753 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:06:22 crc kubenswrapper[4753]: I1205 17:06:22.719784 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:22 crc kubenswrapper[4753]: I1205 17:06:22.719868 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:22 crc kubenswrapper[4753]: E1205 17:06:22.720035 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:22 crc kubenswrapper[4753]: I1205 17:06:22.720352 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:22 crc kubenswrapper[4753]: E1205 17:06:22.720600 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:22 crc kubenswrapper[4753]: E1205 17:06:22.720760 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:23 crc kubenswrapper[4753]: I1205 17:06:23.719452 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:23 crc kubenswrapper[4753]: E1205 17:06:23.719591 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:24 crc kubenswrapper[4753]: I1205 17:06:24.720120 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:24 crc kubenswrapper[4753]: I1205 17:06:24.720224 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:24 crc kubenswrapper[4753]: E1205 17:06:24.720312 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:24 crc kubenswrapper[4753]: I1205 17:06:24.720164 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:24 crc kubenswrapper[4753]: E1205 17:06:24.720429 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:24 crc kubenswrapper[4753]: E1205 17:06:24.720517 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:25 crc kubenswrapper[4753]: I1205 17:06:25.719510 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:25 crc kubenswrapper[4753]: E1205 17:06:25.719663 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:26 crc kubenswrapper[4753]: I1205 17:06:26.720286 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:26 crc kubenswrapper[4753]: I1205 17:06:26.720416 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:26 crc kubenswrapper[4753]: E1205 17:06:26.720508 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:26 crc kubenswrapper[4753]: I1205 17:06:26.720428 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:26 crc kubenswrapper[4753]: E1205 17:06:26.720662 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:26 crc kubenswrapper[4753]: E1205 17:06:26.720778 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:26 crc kubenswrapper[4753]: E1205 17:06:26.851494 4753 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:06:27 crc kubenswrapper[4753]: I1205 17:06:27.719787 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:27 crc kubenswrapper[4753]: E1205 17:06:27.719958 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:27 crc kubenswrapper[4753]: I1205 17:06:27.720312 4753 scope.go:117] "RemoveContainer" containerID="1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e" Dec 05 17:06:27 crc kubenswrapper[4753]: I1205 17:06:27.721254 4753 scope.go:117] "RemoveContainer" containerID="7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24" Dec 05 17:06:27 crc kubenswrapper[4753]: I1205 17:06:27.938789 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/1.log" Dec 05 17:06:27 crc kubenswrapper[4753]: I1205 17:06:27.938912 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpl8r" event={"ID":"6b3d3501-4f16-4375-adf2-fd54b1cd13cf","Type":"ContainerStarted","Data":"eedfb2d21b98fca57c1ceac0b8e177906400b69a7b79c834b4a5059d01f98efe"} Dec 05 17:06:27 crc kubenswrapper[4753]: I1205 17:06:27.942833 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/3.log" Dec 05 17:06:27 crc kubenswrapper[4753]: I1205 17:06:27.946777 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerStarted","Data":"a1c3ee586bdec79f6c186af02f1a6c42318a899455efe849a1d5d1f61c9274d0"} Dec 05 17:06:27 crc kubenswrapper[4753]: I1205 17:06:27.947908 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:06:27 crc kubenswrapper[4753]: I1205 17:06:27.958354 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-hpl8r" podStartSLOduration=105.958322463 podStartE2EDuration="1m45.958322463s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:27.956201662 +0000 UTC m=+126.459308668" watchObservedRunningTime="2025-12-05 17:06:27.958322463 +0000 UTC m=+126.461429469" Dec 05 17:06:28 crc kubenswrapper[4753]: I1205 17:06:28.606476 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podStartSLOduration=106.606434212 podStartE2EDuration="1m46.606434212s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:27.984267813 +0000 UTC m=+126.487374839" watchObservedRunningTime="2025-12-05 17:06:28.606434212 +0000 UTC m=+127.109541248" Dec 05 17:06:28 crc kubenswrapper[4753]: I1205 17:06:28.606956 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-jjgfd"] Dec 05 17:06:28 crc kubenswrapper[4753]: I1205 17:06:28.607235 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:28 crc kubenswrapper[4753]: E1205 17:06:28.607425 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:28 crc kubenswrapper[4753]: I1205 17:06:28.720174 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:28 crc kubenswrapper[4753]: I1205 17:06:28.720241 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:28 crc kubenswrapper[4753]: E1205 17:06:28.720407 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:28 crc kubenswrapper[4753]: E1205 17:06:28.720559 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:29 crc kubenswrapper[4753]: I1205 17:06:29.720408 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:29 crc kubenswrapper[4753]: E1205 17:06:29.720579 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:30 crc kubenswrapper[4753]: I1205 17:06:30.719828 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:30 crc kubenswrapper[4753]: I1205 17:06:30.719896 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:30 crc kubenswrapper[4753]: I1205 17:06:30.719828 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:30 crc kubenswrapper[4753]: E1205 17:06:30.719981 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:06:30 crc kubenswrapper[4753]: E1205 17:06:30.720064 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:06:30 crc kubenswrapper[4753]: E1205 17:06:30.720192 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-jjgfd" podUID="00ab636b-9cc9-4a6f-8e6e-6442b35280ca" Dec 05 17:06:31 crc kubenswrapper[4753]: I1205 17:06:31.720476 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:31 crc kubenswrapper[4753]: E1205 17:06:31.722749 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.719912 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.719965 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.720084 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.722080 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.722401 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.722455 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.722554 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.722658 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.723201 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 17:06:32 crc kubenswrapper[4753]: I1205 17:06:32.747523 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:06:33 crc kubenswrapper[4753]: I1205 17:06:33.720529 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:37 crc kubenswrapper[4753]: I1205 17:06:37.997518 4753 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.037612 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-b7687"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.038178 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.040333 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rkmxh"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.040427 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.040705 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.040843 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vx9s2"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.040987 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.041248 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.041387 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.041524 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.041564 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.042405 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.042723 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.043096 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.043442 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.044073 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rnr95"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.044688 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.045345 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.045793 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.048946 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.049362 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.050331 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.050840 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.051561 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.051869 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.052080 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.054409 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.055055 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.056005 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.056707 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.056753 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.056947 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.057019 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.074009 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.074401 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.074450 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.074683 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.074715 4753 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.074853 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.074884 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.075082 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.075267 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.075452 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.075836 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076010 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076070 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076231 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076268 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076336 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076467 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076531 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076587 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076701 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.076821 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.077118 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.077941 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.078073 4753 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-config-operator/openshift-config-operator-7777fb866f-4gllz"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.078633 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.078692 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.078809 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.078938 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-2nb5t"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.079245 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2nb5t" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.079319 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.079452 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.079500 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.079729 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.079953 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-9c622"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.080348 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.080371 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.080437 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-64nzp"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.080490 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.080607 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.080676 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.080710 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.080836 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.080876 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.097409 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.100904 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kzdfh"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.132849 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4vvd9"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.133235 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.133569 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.133702 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.133768 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.134772 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.135642 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.136053 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.136358 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.136484 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.137419 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.141040 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.144621 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.145293 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.145981 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.146176 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.146281 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.146533 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.146664 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.146900 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.147126 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.150446 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.150595 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.150694 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.151932 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152213 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152364 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152433 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152482 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152533 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152490 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152666 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152714 4753 reflector.go:368] Caches populated 
for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152755 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152779 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152867 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.152893 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153004 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153009 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153231 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153249 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153345 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153560 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153645 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153751 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153849 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.153925 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.154170 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.157993 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.158805 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.159256 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.159413 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-9tzjp"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.159445 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.159654 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.159807 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.160094 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.160505 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.161012 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.161390 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.161527 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.161684 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.165057 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.165735 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.165803 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.166421 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.167617 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.173491 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.185252 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.187370 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vx9s2"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.187791 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.189926 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.193210 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hzrmq"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.206785 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.209056 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.209279 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rkmxh"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211300 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/72117afc-2e5f-4696-b515-76aced63c30f-bound-sa-token\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211332 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-policies\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211359 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/72117afc-2e5f-4696-b515-76aced63c30f-metrics-tls\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211391 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211433 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-audit\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211457 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aa048c9-05f2-41f3-ad26-c88d5c98c453-serving-cert\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211479 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-images\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211497 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d179b2f-6775-4d3a-be36-3960799428db-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211518 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/915c88c7-cac2-48b3-ab7a-6e23e7240465-serving-cert\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211536 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjf54\" (UniqueName: \"kubernetes.io/projected/3d179b2f-6775-4d3a-be36-3960799428db-kube-api-access-rjf54\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211553 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73058959-ba27-4d94-8662-a213a53113e9-config\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211640 4753 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-b7687"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211664 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/782feb3b-5fe4-413e-87a0-9602f412897e-etcd-client\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211773 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/40a88e42-a0d7-4703-a3f8-25f524f90eca-audit-dir\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211807 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e11d89c-e71a-4a17-b1dd-da3883753fde-config\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211829 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-service-ca\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211851 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d179b2f-6775-4d3a-be36-3960799428db-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211874 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/782feb3b-5fe4-413e-87a0-9602f412897e-audit-dir\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211892 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-client-ca\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211913 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gxf2\" (UniqueName: \"kubernetes.io/projected/bcea690d-529b-4175-b5fd-a1a07970cf0d-kube-api-access-4gxf2\") pod \"openshift-config-operator-7777fb866f-4gllz\" (UID: \"bcea690d-529b-4175-b5fd-a1a07970cf0d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211947 
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211947 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48llr\" (UniqueName: \"kubernetes.io/projected/915c88c7-cac2-48b3-ab7a-6e23e7240465-kube-api-access-48llr\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211969 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/40a88e42-a0d7-4703-a3f8-25f524f90eca-audit-policies\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.211987 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/40a88e42-a0d7-4703-a3f8-25f524f90eca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212010 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212033 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/002aa1a9-0253-4b17-8c8c-d23c830c46cc-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jwgnx\" (UID: \"002aa1a9-0253-4b17-8c8c-d23c830c46cc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212064 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/782feb3b-5fe4-413e-87a0-9602f412897e-serving-cert\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212085 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72117afc-2e5f-4696-b515-76aced63c30f-trusted-ca\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212108 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p49w\" (UniqueName: \"kubernetes.io/projected/0aa048c9-05f2-41f3-ad26-c88d5c98c453-kube-api-access-4p49w\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212130 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-config\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212164 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/73058959-ba27-4d94-8662-a213a53113e9-machine-approver-tls\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212187 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h74fl\" (UniqueName: \"kubernetes.io/projected/72117afc-2e5f-4696-b515-76aced63c30f-kube-api-access-h74fl\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212216 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-config\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212240 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-oauth-config\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212260 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-trusted-ca-bundle\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212281 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlmh2\" (UniqueName: \"kubernetes.io/projected/40a88e42-a0d7-4703-a3f8-25f524f90eca-kube-api-access-qlmh2\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212312 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40a88e42-a0d7-4703-a3f8-25f524f90eca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212334 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bcea690d-529b-4175-b5fd-a1a07970cf0d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-4gllz\" (UID: \"bcea690d-529b-4175-b5fd-a1a07970cf0d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212353 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-image-import-ca\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212372 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g7jlp\" (UniqueName: \"kubernetes.io/projected/0f4f139a-e961-4510-9a82-a2de30587b6f-kube-api-access-g7jlp\") pod \"openshift-apiserver-operator-796bbdcf4f-k49xj\" (UID: \"0f4f139a-e961-4510-9a82-a2de30587b6f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212390 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-service-ca-bundle\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212411 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212431 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e11d89c-e71a-4a17-b1dd-da3883753fde-serving-cert\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212451 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psmlg\" (UniqueName: \"kubernetes.io/projected/48038042-7b0f-48d9-9f90-6c0b9dd179d6-kube-api-access-psmlg\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212472 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f4f139a-e961-4510-9a82-a2de30587b6f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-k49xj\" (UID: \"0f4f139a-e961-4510-9a82-a2de30587b6f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj"
"operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-config\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212513 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212534 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212554 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-serving-cert\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212574 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-config\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212592 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/002aa1a9-0253-4b17-8c8c-d23c830c46cc-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jwgnx\" (UID: \"002aa1a9-0253-4b17-8c8c-d23c830c46cc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212611 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/73058959-ba27-4d94-8662-a213a53113e9-auth-proxy-config\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212634 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212653 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jd75\" (UniqueName: \"kubernetes.io/projected/782feb3b-5fe4-413e-87a0-9602f412897e-kube-api-access-2jd75\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212672 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcea690d-529b-4175-b5fd-a1a07970cf0d-serving-cert\") pod \"openshift-config-operator-7777fb866f-4gllz\" (UID: \"bcea690d-529b-4175-b5fd-a1a07970cf0d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.212691 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-client-ca\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.213246 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lcd59"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.214386 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.215608 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/002aa1a9-0253-4b17-8c8c-d23c830c46cc-config\") pod \"kube-apiserver-operator-766d6c64bb-jwgnx\" (UID: \"002aa1a9-0253-4b17-8c8c-d23c830c46cc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.215662 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.215793 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216248 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216323 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216348 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2kb8\" (UniqueName: \"kubernetes.io/projected/c3117f4b-6f3a-4131-9001-d39222e6f268-kube-api-access-k2kb8\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216584 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bz6l4\" (UniqueName: \"kubernetes.io/projected/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-kube-api-access-bz6l4\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216609 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nwkn\" (UniqueName: \"kubernetes.io/projected/f119bafa-ed3a-42d2-876f-c63999b216e1-kube-api-access-6nwkn\") pod \"downloads-7954f5f757-2nb5t\" (UID: \"f119bafa-ed3a-42d2-876f-c63999b216e1\") " pod="openshift-console/downloads-7954f5f757-2nb5t" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216625 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216653 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6mtt\" (UniqueName: \"kubernetes.io/projected/1e11d89c-e71a-4a17-b1dd-da3883753fde-kube-api-access-k6mtt\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216668 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-login\") pod 
\"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216687 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f4f139a-e961-4510-9a82-a2de30587b6f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-k49xj\" (UID: \"0f4f139a-e961-4510-9a82-a2de30587b6f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216703 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3d179b2f-6775-4d3a-be36-3960799428db-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216719 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216735 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216750 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-97z8z\" (UniqueName: \"kubernetes.io/projected/73058959-ba27-4d94-8662-a213a53113e9-kube-api-access-97z8z\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216766 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/782feb3b-5fe4-413e-87a0-9602f412897e-encryption-config\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216781 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/40a88e42-a0d7-4703-a3f8-25f524f90eca-encryption-config\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216797 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: 
\"kubernetes.io/host-path/782feb3b-5fe4-413e-87a0-9602f412897e-node-pullsecrets\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216811 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/40a88e42-a0d7-4703-a3f8-25f524f90eca-etcd-client\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216826 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1e11d89c-e71a-4a17-b1dd-da3883753fde-trusted-ca\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216840 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-oauth-serving-cert\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216854 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-config\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216869 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-dir\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216885 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-config\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216898 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-etcd-serving-ca\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216913 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82rp2\" (UniqueName: \"kubernetes.io/projected/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-kube-api-access-82rp2\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216929 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-serving-cert\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216944 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216959 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216978 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40a88e42-a0d7-4703-a3f8-25f524f90eca-serving-cert\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216992 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216411 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rnr95"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.216450 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.217389 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.218560 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.220339 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.220652 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4vvd9"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.221717 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-64nzp"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.222606 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.223702 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.224173 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.225407 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2nb5t"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.226593 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.227515 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-9c622"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.228420 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.230382 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.231037 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.231337 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tqddg"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.232111 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.232301 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-hqm52"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.232644 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.233329 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.234017 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.234551 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hq6h7"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.235411 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.235542 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.236180 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.236549 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.237577 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s87ds"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.238165 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.238405 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.238931 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.239414 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.240723 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.241040 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-55drw"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.241184 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.241731 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.242186 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.242553 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.242566 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.243629 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-wxsmf"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.244231 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-wxsmf" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.244778 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kzdfh"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.245601 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.246631 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.248032 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.248912 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.249931 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lcd59"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.250869 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.251883 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.261777 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-4gllz"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.263226 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.264009 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.265485 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-hqm52"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.266977 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-wxsmf"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.268270 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-dns-operator/dns-operator-744455d44c-hzrmq"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.269340 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.270445 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.271562 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-djddj"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.272679 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.273135 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.274336 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.276246 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tqddg"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.277222 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-55drw"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.278476 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.280126 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hq6h7"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.280814 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.282042 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.283534 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s87ds"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.284703 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-djddj"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.285773 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-xhb8z"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.286699 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.287162 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-4ctlc"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.287692 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.288474 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xhb8z"] Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.301430 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.317740 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.317886 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318001 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2kb8\" (UniqueName: \"kubernetes.io/projected/c3117f4b-6f3a-4131-9001-d39222e6f268-kube-api-access-k2kb8\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318089 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bz6l4\" (UniqueName: \"kubernetes.io/projected/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-kube-api-access-bz6l4\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318219 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nwkn\" (UniqueName: \"kubernetes.io/projected/f119bafa-ed3a-42d2-876f-c63999b216e1-kube-api-access-6nwkn\") pod \"downloads-7954f5f757-2nb5t\" (UID: \"f119bafa-ed3a-42d2-876f-c63999b216e1\") " pod="openshift-console/downloads-7954f5f757-2nb5t" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318327 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318415 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6mtt\" (UniqueName: \"kubernetes.io/projected/1e11d89c-e71a-4a17-b1dd-da3883753fde-kube-api-access-k6mtt\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318495 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318564 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f4f139a-e961-4510-9a82-a2de30587b6f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-k49xj\" (UID: \"0f4f139a-e961-4510-9a82-a2de30587b6f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318637 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3d179b2f-6775-4d3a-be36-3960799428db-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318704 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318768 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318839 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-97z8z\" (UniqueName: \"kubernetes.io/projected/73058959-ba27-4d94-8662-a213a53113e9-kube-api-access-97z8z\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.318934 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/782feb3b-5fe4-413e-87a0-9602f412897e-encryption-config\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.319013 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/40a88e42-a0d7-4703-a3f8-25f524f90eca-encryption-config\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.319091 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" 
(UniqueName: \"kubernetes.io/host-path/782feb3b-5fe4-413e-87a0-9602f412897e-node-pullsecrets\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.319203 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/40a88e42-a0d7-4703-a3f8-25f524f90eca-etcd-client\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.319292 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1e11d89c-e71a-4a17-b1dd-da3883753fde-trusted-ca\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320230 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-oauth-serving-cert\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320271 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-config\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320291 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-dir\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320318 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-config\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320342 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-etcd-serving-ca\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320368 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82rp2\" (UniqueName: \"kubernetes.io/projected/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-kube-api-access-82rp2\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320396 4753 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-serving-cert\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320419 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320441 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320460 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0f4f139a-e961-4510-9a82-a2de30587b6f-config\") pod \"openshift-apiserver-operator-796bbdcf4f-k49xj\" (UID: \"0f4f139a-e961-4510-9a82-a2de30587b6f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320474 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40a88e42-a0d7-4703-a3f8-25f524f90eca-serving-cert\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320551 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320583 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/72117afc-2e5f-4696-b515-76aced63c30f-bound-sa-token\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320603 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-policies\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320623 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/72117afc-2e5f-4696-b515-76aced63c30f-metrics-tls\") pod \"ingress-operator-5b745b69d9-q2bmk\" 
(UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320642 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320674 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-audit\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320692 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aa048c9-05f2-41f3-ad26-c88d5c98c453-serving-cert\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320709 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-images\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320726 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d179b2f-6775-4d3a-be36-3960799428db-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320742 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/915c88c7-cac2-48b3-ab7a-6e23e7240465-serving-cert\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320758 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjf54\" (UniqueName: \"kubernetes.io/projected/3d179b2f-6775-4d3a-be36-3960799428db-kube-api-access-rjf54\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320774 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73058959-ba27-4d94-8662-a213a53113e9-config\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 
17:06:38.320790 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/782feb3b-5fe4-413e-87a0-9602f412897e-etcd-client\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320805 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/40a88e42-a0d7-4703-a3f8-25f524f90eca-audit-dir\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320825 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e11d89c-e71a-4a17-b1dd-da3883753fde-config\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320843 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-service-ca\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320861 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d179b2f-6775-4d3a-be36-3960799428db-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320901 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/782feb3b-5fe4-413e-87a0-9602f412897e-audit-dir\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320922 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-client-ca\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320941 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gxf2\" (UniqueName: \"kubernetes.io/projected/bcea690d-529b-4175-b5fd-a1a07970cf0d-kube-api-access-4gxf2\") pod \"openshift-config-operator-7777fb866f-4gllz\" (UID: \"bcea690d-529b-4175-b5fd-a1a07970cf0d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320972 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48llr\" (UniqueName: \"kubernetes.io/projected/915c88c7-cac2-48b3-ab7a-6e23e7240465-kube-api-access-48llr\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321000 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/40a88e42-a0d7-4703-a3f8-25f524f90eca-audit-policies\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321022 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/40a88e42-a0d7-4703-a3f8-25f524f90eca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321041 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321059 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/002aa1a9-0253-4b17-8c8c-d23c830c46cc-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jwgnx\" (UID: \"002aa1a9-0253-4b17-8c8c-d23c830c46cc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321086 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/782feb3b-5fe4-413e-87a0-9602f412897e-serving-cert\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321102 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72117afc-2e5f-4696-b515-76aced63c30f-trusted-ca\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321123 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p49w\" (UniqueName: \"kubernetes.io/projected/0aa048c9-05f2-41f3-ad26-c88d5c98c453-kube-api-access-4p49w\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321139 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-config\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321173 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/73058959-ba27-4d94-8662-a213a53113e9-machine-approver-tls\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321193 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h74fl\" (UniqueName: \"kubernetes.io/projected/72117afc-2e5f-4696-b515-76aced63c30f-kube-api-access-h74fl\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321218 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-config\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321236 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-oauth-config\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321252 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-trusted-ca-bundle\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321269 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlmh2\" (UniqueName: \"kubernetes.io/projected/40a88e42-a0d7-4703-a3f8-25f524f90eca-kube-api-access-qlmh2\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321292 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40a88e42-a0d7-4703-a3f8-25f524f90eca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321309 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bcea690d-529b-4175-b5fd-a1a07970cf0d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-4gllz\" (UID: \"bcea690d-529b-4175-b5fd-a1a07970cf0d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321327 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-image-import-ca\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321345 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g7jlp\" (UniqueName: \"kubernetes.io/projected/0f4f139a-e961-4510-9a82-a2de30587b6f-kube-api-access-g7jlp\") pod \"openshift-apiserver-operator-796bbdcf4f-k49xj\" (UID: \"0f4f139a-e961-4510-9a82-a2de30587b6f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321363 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-service-ca-bundle\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321395 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321415 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e11d89c-e71a-4a17-b1dd-da3883753fde-serving-cert\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321432 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psmlg\" (UniqueName: \"kubernetes.io/projected/48038042-7b0f-48d9-9f90-6c0b9dd179d6-kube-api-access-psmlg\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321450 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f4f139a-e961-4510-9a82-a2de30587b6f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-k49xj\" (UID: \"0f4f139a-e961-4510-9a82-a2de30587b6f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321475 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-config\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321503 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321528 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321550 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-serving-cert\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321567 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-config\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321583 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/002aa1a9-0253-4b17-8c8c-d23c830c46cc-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jwgnx\" (UID: \"002aa1a9-0253-4b17-8c8c-d23c830c46cc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321599 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/73058959-ba27-4d94-8662-a213a53113e9-auth-proxy-config\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321617 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321633 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jd75\" (UniqueName: \"kubernetes.io/projected/782feb3b-5fe4-413e-87a0-9602f412897e-kube-api-access-2jd75\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321650 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcea690d-529b-4175-b5fd-a1a07970cf0d-serving-cert\") pod \"openshift-config-operator-7777fb866f-4gllz\" (UID: \"bcea690d-529b-4175-b5fd-a1a07970cf0d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321666 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-client-ca\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.321682 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/002aa1a9-0253-4b17-8c8c-d23c830c46cc-config\") pod \"kube-apiserver-operator-766d6c64bb-jwgnx\" (UID: \"002aa1a9-0253-4b17-8c8c-d23c830c46cc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.322382 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/002aa1a9-0253-4b17-8c8c-d23c830c46cc-config\") pod \"kube-apiserver-operator-766d6c64bb-jwgnx\" (UID: \"002aa1a9-0253-4b17-8c8c-d23c830c46cc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.322409 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/40a88e42-a0d7-4703-a3f8-25f524f90eca-audit-dir\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.322573 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/782feb3b-5fe4-413e-87a0-9602f412897e-audit-dir\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.322914 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.323302 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-config\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.323537 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e11d89c-e71a-4a17-b1dd-da3883753fde-config\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.323603 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-client-ca\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.323823 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3d179b2f-6775-4d3a-be36-3960799428db-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.323994 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-config\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.324016 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/1e11d89c-e71a-4a17-b1dd-da3883753fde-trusted-ca\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.320122 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/782feb3b-5fe4-413e-87a0-9602f412897e-node-pullsecrets\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.324172 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-service-ca\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.324670 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.325048 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3d179b2f-6775-4d3a-be36-3960799428db-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.325138 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-dir\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.325780 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/782feb3b-5fe4-413e-87a0-9602f412897e-encryption-config\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.326564 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bcea690d-529b-4175-b5fd-a1a07970cf0d-available-featuregates\") pod \"openshift-config-operator-7777fb866f-4gllz\" (UID: \"bcea690d-529b-4175-b5fd-a1a07970cf0d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.326446 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-audit\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.327275 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-etcd-serving-ca\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.327296 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-oauth-serving-cert\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.327308 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-policies\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.327769 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-image-import-ca\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.327943 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-oauth-config\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.327995 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/72117afc-2e5f-4696-b515-76aced63c30f-metrics-tls\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.328228 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-images\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.328237 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-config\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.328569 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-config\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.329587 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.329651 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-client-ca\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.329795 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-config\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.329888 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.329906 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/782feb3b-5fe4-413e-87a0-9602f412897e-serving-cert\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.330121 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.330588 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/73058959-ba27-4d94-8662-a213a53113e9-machine-approver-tls\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.331055 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/73058959-ba27-4d94-8662-a213a53113e9-config\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.331270 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/73058959-ba27-4d94-8662-a213a53113e9-auth-proxy-config\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.331441 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-config\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.331618 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-trusted-ca-bundle\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.331811 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-serving-cert\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.331891 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/782feb3b-5fe4-413e-87a0-9602f412897e-etcd-client\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.331964 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/72117afc-2e5f-4696-b515-76aced63c30f-trusted-ca\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.332000 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.332511 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-service-ca-bundle\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.332916 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.332945 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/782feb3b-5fe4-413e-87a0-9602f412897e-trusted-ca-bundle\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.333303 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.333314 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0f4f139a-e961-4510-9a82-a2de30587b6f-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-k49xj\" (UID: \"0f4f139a-e961-4510-9a82-a2de30587b6f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.333350 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bcea690d-529b-4175-b5fd-a1a07970cf0d-serving-cert\") pod \"openshift-config-operator-7777fb866f-4gllz\" (UID: \"bcea690d-529b-4175-b5fd-a1a07970cf0d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.333376 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.333571 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.333595 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.333633 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-serving-cert\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.334004 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.334083 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.334123 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/40a88e42-a0d7-4703-a3f8-25f524f90eca-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.334250 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/40a88e42-a0d7-4703-a3f8-25f524f90eca-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.334276 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/40a88e42-a0d7-4703-a3f8-25f524f90eca-audit-policies\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.334547 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/40a88e42-a0d7-4703-a3f8-25f524f90eca-encryption-config\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.334751 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/002aa1a9-0253-4b17-8c8c-d23c830c46cc-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jwgnx\" (UID: \"002aa1a9-0253-4b17-8c8c-d23c830c46cc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.335229 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1e11d89c-e71a-4a17-b1dd-da3883753fde-serving-cert\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.335320 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aa048c9-05f2-41f3-ad26-c88d5c98c453-serving-cert\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.335785 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/40a88e42-a0d7-4703-a3f8-25f524f90eca-etcd-client\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.335954 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/915c88c7-cac2-48b3-ab7a-6e23e7240465-serving-cert\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.336014 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.336097 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.336279 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/40a88e42-a0d7-4703-a3f8-25f524f90eca-serving-cert\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.360708 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.380245 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.401267 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.421004 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.441163 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.461668 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.482135 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.501412 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.521715 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.540929 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.560577 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.581418 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.600811 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.620939 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.642064 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.661919 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.681843 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.702041 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.720919 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.741046 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.761409 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.780766 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.800516 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.821189 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.841360 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.862475 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.881881 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.901982 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.926914 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.941865 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.961472 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 05 17:06:38 crc kubenswrapper[4753]: I1205 17:06:38.981970 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.003276 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.022610 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.041441 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.061564 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.081677 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.102951 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.122221 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.141796 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.160889 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.181434 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.201507 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.221817 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.239802 4753 request.go:700] Waited for 1.006996104s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca-operator/secrets?fieldSelector=metadata.name%3Dservice-ca-operator-dockercfg-rg9jl&limit=500&resourceVersion=0
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.242210 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.261540 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.280799 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.300762 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.321319 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.341666 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.361545 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.381075 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.402034 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.423009 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.442801 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.462523 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.481291 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.501496 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.520314 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.541561 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.561043 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.581145 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.602561 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.629098 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.641043 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.662332 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.681300 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.701818 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.740375 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/abca9c47-e52b-4410-83e1-b486f8f01aca-ca-trust-extracted\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.740423 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s92jj\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-kube-api-access-s92jj\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.740575 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-tls\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.740726 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-bound-sa-token\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.740856 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/abca9c47-e52b-4410-83e1-b486f8f01aca-installation-pull-secrets\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.740918 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-certificates\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.740978 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.741039 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-trusted-ca\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:39 crc kubenswrapper[4753]: E1205 17:06:39.741691 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.241661441 +0000 UTC m=+138.744768477 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.744050 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.761571 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.782472 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.802602 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.821203 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.841752 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.841767 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.842055 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/783086d1-b0d9-4f35-b3d1-95cd8df517aa-tmpfs\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7"
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.842133 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8dfb6055-bb8a-4b03-88cb-0ec84a2b1490-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-m9rf5\" (UID: \"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5"
Dec 05 17:06:39 crc kubenswrapper[4753]: E1205 17:06:39.842352 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.34233324 +0000 UTC m=+138.845440246 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.842483 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-registration-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.842580 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sgj8\" (UniqueName: \"kubernetes.io/projected/783086d1-b0d9-4f35-b3d1-95cd8df517aa-kube-api-access-4sgj8\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.842675 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l4dt\" (UniqueName: \"kubernetes.io/projected/27067136-5ba5-407b-a4a5-4d1e8c284564-kube-api-access-4l4dt\") pod \"control-plane-machine-set-operator-78cbb6b69f-pcb9b\" (UID: \"27067136-5ba5-407b-a4a5-4d1e8c284564\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.842774 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c9582e4b-c48a-470f-a0f7-9cbad68d972f-etcd-ca\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.843026 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs7sx\" (UniqueName: \"kubernetes.io/projected/24e4585c-4850-4101-a40d-b38424860805-kube-api-access-qs7sx\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.843117 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/783086d1-b0d9-4f35-b3d1-95cd8df517aa-apiservice-cert\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.843409 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c9582e4b-c48a-470f-a0f7-9cbad68d972f-etcd-client\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.844190 4753 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s87ds\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.844373 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqr46\" (UniqueName: \"kubernetes.io/projected/c8b527e5-cd18-4b81-aa80-0c7004486286-kube-api-access-nqr46\") pod \"package-server-manager-789f6589d5-mjbr2\" (UID: \"c8b527e5-cd18-4b81-aa80-0c7004486286\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.844524 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkr5c\" (UniqueName: \"kubernetes.io/projected/86d71fb0-ec2b-4606-bb01-0e88f42b572b-kube-api-access-fkr5c\") pod \"migrator-59844c95c7-rvmt6\" (UID: \"86d71fb0-ec2b-4606-bb01-0e88f42b572b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.844557 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqn7l\" (UID: \"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.844583 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9582e4b-c48a-470f-a0f7-9cbad68d972f-serving-cert\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.844615 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/abca9c47-e52b-4410-83e1-b486f8f01aca-installation-pull-secrets\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.844677 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8dfb6055-bb8a-4b03-88cb-0ec84a2b1490-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-m9rf5\" (UID: \"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.844749 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jwnb8\" (UniqueName: \"kubernetes.io/projected/c18256b2-4287-4df0-8819-201cf14c6380-kube-api-access-jwnb8\") pod \"collect-profiles-29415900-2lmxf\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.844874 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cf24b057-f212-46d9-a3f8-0ef08669940d-metrics-tls\") pod \"dns-default-xhb8z\" (UID: \"cf24b057-f212-46d9-a3f8-0ef08669940d\") " pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.845044 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb0b91f2-5d2a-4bd3-92da-6573800548ff-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tfkgs\" (UID: \"bb0b91f2-5d2a-4bd3-92da-6573800548ff\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.845124 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc777\" (UniqueName: \"kubernetes.io/projected/6b4d13e0-6218-444d-822b-656abbf0b80c-kube-api-access-fc777\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.845186 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcnps\" (UniqueName: \"kubernetes.io/projected/1b6a5e8c-2aea-47e2-802c-8604814dbf18-kube-api-access-zcnps\") pod \"machine-config-server-4ctlc\" (UID: \"1b6a5e8c-2aea-47e2-802c-8604814dbf18\") " pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.845263 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4bad5655-05a5-4081-868b-3d2e69df620f-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-wk9v7\" (UID: \"4bad5655-05a5-4081-868b-3d2e69df620f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.846577 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.846758 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6b4d13e0-6218-444d-822b-656abbf0b80c-images\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.846843 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcjns\" (UniqueName: \"kubernetes.io/projected/c81db375-2956-49d8-856b-90395544e758-kube-api-access-fcjns\") pod \"multus-admission-controller-857f4d67dd-tqddg\" 
(UID: \"c81db375-2956-49d8-856b-90395544e758\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.846944 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18256b2-4287-4df0-8819-201cf14c6380-config-volume\") pod \"collect-profiles-29415900-2lmxf\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.846985 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81db375-2956-49d8-856b-90395544e758-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tqddg\" (UID: \"c81db375-2956-49d8-856b-90395544e758\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.847018 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c9582e4b-c48a-470f-a0f7-9cbad68d972f-etcd-service-ca\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.847065 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-trusted-ca\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: E1205 17:06:39.847109 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.34708277 +0000 UTC m=+138.850189796 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.847216 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f69r6\" (UniqueName: \"kubernetes.io/projected/660abb91-a2d0-4cf3-b0d0-073b1cccdf37-kube-api-access-f69r6\") pod \"catalog-operator-68c6474976-flncn\" (UID: \"660abb91-a2d0-4cf3-b0d0-073b1cccdf37\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.847814 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l2phx\" (UniqueName: \"kubernetes.io/projected/667c9562-fe34-4ad9-9150-43613978c0d8-kube-api-access-l2phx\") pod \"service-ca-9c57cc56f-hq6h7\" (UID: \"667c9562-fe34-4ad9-9150-43613978c0d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.848193 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b-proxy-tls\") pod \"machine-config-controller-84d6567774-22ncf\" (UID: \"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.848762 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s92jj\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-kube-api-access-s92jj\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.848875 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvmmb\" (UniqueName: \"kubernetes.io/projected/bb0b91f2-5d2a-4bd3-92da-6573800548ff-kube-api-access-wvmmb\") pod \"openshift-controller-manager-operator-756b6f6bc6-tfkgs\" (UID: \"bb0b91f2-5d2a-4bd3-92da-6573800548ff\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.848972 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgb84\" (UniqueName: \"kubernetes.io/projected/d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b-kube-api-access-tgb84\") pod \"machine-config-controller-84d6567774-22ncf\" (UID: \"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.849014 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbcd15e6-1457-4722-b319-13651ca7598b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-69bc9\" (UID: 
\"cbcd15e6-1457-4722-b319-13651ca7598b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.849084 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/660abb91-a2d0-4cf3-b0d0-073b1cccdf37-profile-collector-cert\") pod \"catalog-operator-68c6474976-flncn\" (UID: \"660abb91-a2d0-4cf3-b0d0-073b1cccdf37\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.849299 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-service-ca-bundle\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.849495 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1b6a5e8c-2aea-47e2-802c-8604814dbf18-node-bootstrap-token\") pod \"machine-config-server-4ctlc\" (UID: \"1b6a5e8c-2aea-47e2-802c-8604814dbf18\") " pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.849769 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/27067136-5ba5-407b-a4a5-4d1e8c284564-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-pcb9b\" (UID: \"27067136-5ba5-407b-a4a5-4d1e8c284564\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.850025 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/783086d1-b0d9-4f35-b3d1-95cd8df517aa-webhook-cert\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.850496 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-tls\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.850605 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5881adf5-7728-46d0-b449-b0d8b0d77c7d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-w58zz\" (UID: \"5881adf5-7728-46d0-b449-b0d8b0d77c7d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.851445 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s87ds\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.851486 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqn7l\" (UID: \"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.851557 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rg5lz\" (UniqueName: \"kubernetes.io/projected/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-kube-api-access-rg5lz\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.852054 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5881adf5-7728-46d0-b449-b0d8b0d77c7d-srv-cert\") pod \"olm-operator-6b444d44fb-w58zz\" (UID: \"5881adf5-7728-46d0-b449-b0d8b0d77c7d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.852289 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/abca9c47-e52b-4410-83e1-b486f8f01aca-installation-pull-secrets\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.852331 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-csi-data-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.852418 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hd4x2\" (UniqueName: \"kubernetes.io/projected/cbcd15e6-1457-4722-b319-13651ca7598b-kube-api-access-hd4x2\") pod \"kube-storage-version-migrator-operator-b67b599dd-69bc9\" (UID: \"cbcd15e6-1457-4722-b319-13651ca7598b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.852879 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-bound-sa-token\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.853382 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/bb0b91f2-5d2a-4bd3-92da-6573800548ff-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tfkgs\" (UID: \"bb0b91f2-5d2a-4bd3-92da-6573800548ff\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.853558 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cf24b057-f212-46d9-a3f8-0ef08669940d-config-volume\") pod \"dns-default-xhb8z\" (UID: \"cf24b057-f212-46d9-a3f8-0ef08669940d\") " pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.853679 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6stz8\" (UniqueName: \"kubernetes.io/projected/5881adf5-7728-46d0-b449-b0d8b0d77c7d-kube-api-access-6stz8\") pod \"olm-operator-6b444d44fb-w58zz\" (UID: \"5881adf5-7728-46d0-b449-b0d8b0d77c7d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.853863 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-certificates\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854180 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c8b527e5-cd18-4b81-aa80-0c7004486286-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mjbr2\" (UID: \"c8b527e5-cd18-4b81-aa80-0c7004486286\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854284 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dfb6055-bb8a-4b03-88cb-0ec84a2b1490-config\") pod \"kube-controller-manager-operator-78b949d7b-m9rf5\" (UID: \"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854319 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znzwq\" (UniqueName: \"kubernetes.io/projected/07ba614e-886f-423d-8ddf-5b727931f58b-kube-api-access-znzwq\") pod \"ingress-canary-wxsmf\" (UID: \"07ba614e-886f-423d-8ddf-5b727931f58b\") " pod="openshift-ingress-canary/ingress-canary-wxsmf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854357 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-metrics-certs\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854526 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: 
\"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-socket-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854566 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/667c9562-fe34-4ad9-9150-43613978c0d8-signing-cabundle\") pod \"service-ca-9c57cc56f-hq6h7\" (UID: \"667c9562-fe34-4ad9-9150-43613978c0d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854601 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-mountpoint-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854635 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/07ba614e-886f-423d-8ddf-5b727931f58b-cert\") pod \"ingress-canary-wxsmf\" (UID: \"07ba614e-886f-423d-8ddf-5b727931f58b\") " pod="openshift-ingress-canary/ingress-canary-wxsmf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854769 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-trusted-ca\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854849 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbcd15e6-1457-4722-b319-13651ca7598b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-69bc9\" (UID: \"cbcd15e6-1457-4722-b319-13651ca7598b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.854925 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-plugins-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.855001 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e87071bd-a323-4f69-89a9-7c2a37bb27a4-config\") pod \"service-ca-operator-777779d784-hqm52\" (UID: \"e87071bd-a323-4f69-89a9-7c2a37bb27a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.855038 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rqfc\" (UniqueName: \"kubernetes.io/projected/e87071bd-a323-4f69-89a9-7c2a37bb27a4-kube-api-access-6rqfc\") pod \"service-ca-operator-777779d784-hqm52\" (UID: \"e87071bd-a323-4f69-89a9-7c2a37bb27a4\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.855071 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/660abb91-a2d0-4cf3-b0d0-073b1cccdf37-srv-cert\") pod \"catalog-operator-68c6474976-flncn\" (UID: \"660abb91-a2d0-4cf3-b0d0-073b1cccdf37\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.855171 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1b6a5e8c-2aea-47e2-802c-8604814dbf18-certs\") pod \"machine-config-server-4ctlc\" (UID: \"1b6a5e8c-2aea-47e2-802c-8604814dbf18\") " pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.855340 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqn7l\" (UID: \"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.855414 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e87071bd-a323-4f69-89a9-7c2a37bb27a4-serving-cert\") pod \"service-ca-operator-777779d784-hqm52\" (UID: \"e87071bd-a323-4f69-89a9-7c2a37bb27a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.855554 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6b4d13e0-6218-444d-822b-656abbf0b80c-proxy-tls\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.855644 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18256b2-4287-4df0-8819-201cf14c6380-secret-volume\") pod \"collect-profiles-29415900-2lmxf\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.856948 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9vg6\" (UniqueName: \"kubernetes.io/projected/92cd61ad-2356-4854-ad74-bde03a83abf0-kube-api-access-x9vg6\") pod \"dns-operator-744455d44c-hzrmq\" (UID: \"92cd61ad-2356-4854-ad74-bde03a83abf0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.857070 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-22ncf\" (UID: \"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b\") " 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.861213 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-certificates\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862226 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/abca9c47-e52b-4410-83e1-b486f8f01aca-ca-trust-extracted\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862385 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndp9z\" (UniqueName: \"kubernetes.io/projected/cf24b057-f212-46d9-a3f8-0ef08669940d-kube-api-access-ndp9z\") pod \"dns-default-xhb8z\" (UID: \"cf24b057-f212-46d9-a3f8-0ef08669940d\") " pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862449 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/667c9562-fe34-4ad9-9150-43613978c0d8-signing-key\") pod \"service-ca-9c57cc56f-hq6h7\" (UID: \"667c9562-fe34-4ad9-9150-43613978c0d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862487 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x92ch\" (UniqueName: \"kubernetes.io/projected/4bad5655-05a5-4081-868b-3d2e69df620f-kube-api-access-x92ch\") pod \"cluster-samples-operator-665b6dd947-wk9v7\" (UID: \"4bad5655-05a5-4081-868b-3d2e69df620f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862559 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9582e4b-c48a-470f-a0f7-9cbad68d972f-config\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862594 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5r9xp\" (UniqueName: \"kubernetes.io/projected/c9582e4b-c48a-470f-a0f7-9cbad68d972f-kube-api-access-5r9xp\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862621 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-stats-auth\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862674 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6b4d13e0-6218-444d-822b-656abbf0b80c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862703 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/92cd61ad-2356-4854-ad74-bde03a83abf0-metrics-tls\") pod \"dns-operator-744455d44c-hzrmq\" (UID: \"92cd61ad-2356-4854-ad74-bde03a83abf0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862734 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-default-certificate\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862799 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rphmt\" (UniqueName: \"kubernetes.io/projected/03fd3f93-fb41-4e32-8276-416c40f2b9a7-kube-api-access-rphmt\") pod \"marketplace-operator-79b997595-s87ds\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.862982 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.863634 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/abca9c47-e52b-4410-83e1-b486f8f01aca-ca-trust-extracted\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.867128 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-tls\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.881265 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.902016 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.921096 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.940864 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.962484 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" 
Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.963760 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:39 crc kubenswrapper[4753]: E1205 17:06:39.964198 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.464113631 +0000 UTC m=+138.967220667 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.964478 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb0b91f2-5d2a-4bd3-92da-6573800548ff-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tfkgs\" (UID: \"bb0b91f2-5d2a-4bd3-92da-6573800548ff\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.964737 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cf24b057-f212-46d9-a3f8-0ef08669940d-config-volume\") pod \"dns-default-xhb8z\" (UID: \"cf24b057-f212-46d9-a3f8-0ef08669940d\") " pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.965036 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6stz8\" (UniqueName: \"kubernetes.io/projected/5881adf5-7728-46d0-b449-b0d8b0d77c7d-kube-api-access-6stz8\") pod \"olm-operator-6b444d44fb-w58zz\" (UID: \"5881adf5-7728-46d0-b449-b0d8b0d77c7d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.965298 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c8b527e5-cd18-4b81-aa80-0c7004486286-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mjbr2\" (UID: \"c8b527e5-cd18-4b81-aa80-0c7004486286\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.965476 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dfb6055-bb8a-4b03-88cb-0ec84a2b1490-config\") pod \"kube-controller-manager-operator-78b949d7b-m9rf5\" (UID: \"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.965642 4753 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-znzwq\" (UniqueName: \"kubernetes.io/projected/07ba614e-886f-423d-8ddf-5b727931f58b-kube-api-access-znzwq\") pod \"ingress-canary-wxsmf\" (UID: \"07ba614e-886f-423d-8ddf-5b727931f58b\") " pod="openshift-ingress-canary/ingress-canary-wxsmf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.965805 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-metrics-certs\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.966045 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-socket-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.966323 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/667c9562-fe34-4ad9-9150-43613978c0d8-signing-cabundle\") pod \"service-ca-9c57cc56f-hq6h7\" (UID: \"667c9562-fe34-4ad9-9150-43613978c0d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.966511 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-mountpoint-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.966664 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/07ba614e-886f-423d-8ddf-5b727931f58b-cert\") pod \"ingress-canary-wxsmf\" (UID: \"07ba614e-886f-423d-8ddf-5b727931f58b\") " pod="openshift-ingress-canary/ingress-canary-wxsmf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.966761 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8dfb6055-bb8a-4b03-88cb-0ec84a2b1490-config\") pod \"kube-controller-manager-operator-78b949d7b-m9rf5\" (UID: \"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.966661 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-mountpoint-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.965877 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bb0b91f2-5d2a-4bd3-92da-6573800548ff-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tfkgs\" (UID: \"bb0b91f2-5d2a-4bd3-92da-6573800548ff\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.966578 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-socket-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.967220 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbcd15e6-1457-4722-b319-13651ca7598b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-69bc9\" (UID: \"cbcd15e6-1457-4722-b319-13651ca7598b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.967389 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-plugins-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.967556 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e87071bd-a323-4f69-89a9-7c2a37bb27a4-config\") pod \"service-ca-operator-777779d784-hqm52\" (UID: \"e87071bd-a323-4f69-89a9-7c2a37bb27a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.967713 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rqfc\" (UniqueName: \"kubernetes.io/projected/e87071bd-a323-4f69-89a9-7c2a37bb27a4-kube-api-access-6rqfc\") pod \"service-ca-operator-777779d784-hqm52\" (UID: \"e87071bd-a323-4f69-89a9-7c2a37bb27a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.967826 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/667c9562-fe34-4ad9-9150-43613978c0d8-signing-cabundle\") pod \"service-ca-9c57cc56f-hq6h7\" (UID: \"667c9562-fe34-4ad9-9150-43613978c0d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.967958 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-plugins-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.968338 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/660abb91-a2d0-4cf3-b0d0-073b1cccdf37-srv-cert\") pod \"catalog-operator-68c6474976-flncn\" (UID: \"660abb91-a2d0-4cf3-b0d0-073b1cccdf37\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.968749 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1b6a5e8c-2aea-47e2-802c-8604814dbf18-certs\") pod \"machine-config-server-4ctlc\" (UID: \"1b6a5e8c-2aea-47e2-802c-8604814dbf18\") " 
pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.968969 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqn7l\" (UID: \"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.969209 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e87071bd-a323-4f69-89a9-7c2a37bb27a4-serving-cert\") pod \"service-ca-operator-777779d784-hqm52\" (UID: \"e87071bd-a323-4f69-89a9-7c2a37bb27a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.969373 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6b4d13e0-6218-444d-822b-656abbf0b80c-proxy-tls\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.969544 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18256b2-4287-4df0-8819-201cf14c6380-secret-volume\") pod \"collect-profiles-29415900-2lmxf\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.969701 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9vg6\" (UniqueName: \"kubernetes.io/projected/92cd61ad-2356-4854-ad74-bde03a83abf0-kube-api-access-x9vg6\") pod \"dns-operator-744455d44c-hzrmq\" (UID: \"92cd61ad-2356-4854-ad74-bde03a83abf0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.969871 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-22ncf\" (UID: \"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970055 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndp9z\" (UniqueName: \"kubernetes.io/projected/cf24b057-f212-46d9-a3f8-0ef08669940d-kube-api-access-ndp9z\") pod \"dns-default-xhb8z\" (UID: \"cf24b057-f212-46d9-a3f8-0ef08669940d\") " pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.969569 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/c8b527e5-cd18-4b81-aa80-0c7004486286-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mjbr2\" (UID: \"c8b527e5-cd18-4b81-aa80-0c7004486286\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 
17:06:39.969280 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e87071bd-a323-4f69-89a9-7c2a37bb27a4-config\") pod \"service-ca-operator-777779d784-hqm52\" (UID: \"e87071bd-a323-4f69-89a9-7c2a37bb27a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.968560 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cbcd15e6-1457-4722-b319-13651ca7598b-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-69bc9\" (UID: \"cbcd15e6-1457-4722-b319-13651ca7598b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970063 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqn7l\" (UID: \"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970555 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/667c9562-fe34-4ad9-9150-43613978c0d8-signing-key\") pod \"service-ca-9c57cc56f-hq6h7\" (UID: \"667c9562-fe34-4ad9-9150-43613978c0d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970653 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x92ch\" (UniqueName: \"kubernetes.io/projected/4bad5655-05a5-4081-868b-3d2e69df620f-kube-api-access-x92ch\") pod \"cluster-samples-operator-665b6dd947-wk9v7\" (UID: \"4bad5655-05a5-4081-868b-3d2e69df620f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970721 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9582e4b-c48a-470f-a0f7-9cbad68d972f-config\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970763 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5r9xp\" (UniqueName: \"kubernetes.io/projected/c9582e4b-c48a-470f-a0f7-9cbad68d972f-kube-api-access-5r9xp\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970802 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-stats-auth\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970851 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6b4d13e0-6218-444d-822b-656abbf0b80c-auth-proxy-config\") pod 
\"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970863 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/07ba614e-886f-423d-8ddf-5b727931f58b-cert\") pod \"ingress-canary-wxsmf\" (UID: \"07ba614e-886f-423d-8ddf-5b727931f58b\") " pod="openshift-ingress-canary/ingress-canary-wxsmf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970887 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/92cd61ad-2356-4854-ad74-bde03a83abf0-metrics-tls\") pod \"dns-operator-744455d44c-hzrmq\" (UID: \"92cd61ad-2356-4854-ad74-bde03a83abf0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.970973 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-default-certificate\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971028 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rphmt\" (UniqueName: \"kubernetes.io/projected/03fd3f93-fb41-4e32-8276-416c40f2b9a7-kube-api-access-rphmt\") pod \"marketplace-operator-79b997595-s87ds\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971062 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/783086d1-b0d9-4f35-b3d1-95cd8df517aa-tmpfs\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971174 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8dfb6055-bb8a-4b03-88cb-0ec84a2b1490-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-m9rf5\" (UID: \"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971214 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-registration-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971245 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sgj8\" (UniqueName: \"kubernetes.io/projected/783086d1-b0d9-4f35-b3d1-95cd8df517aa-kube-api-access-4sgj8\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971270 4753 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-4l4dt\" (UniqueName: \"kubernetes.io/projected/27067136-5ba5-407b-a4a5-4d1e8c284564-kube-api-access-4l4dt\") pod \"control-plane-machine-set-operator-78cbb6b69f-pcb9b\" (UID: \"27067136-5ba5-407b-a4a5-4d1e8c284564\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971296 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c9582e4b-c48a-470f-a0f7-9cbad68d972f-etcd-ca\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971329 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs7sx\" (UniqueName: \"kubernetes.io/projected/24e4585c-4850-4101-a40d-b38424860805-kube-api-access-qs7sx\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971353 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/783086d1-b0d9-4f35-b3d1-95cd8df517aa-apiservice-cert\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971409 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c9582e4b-c48a-470f-a0f7-9cbad68d972f-etcd-client\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971469 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s87ds\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971502 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqr46\" (UniqueName: \"kubernetes.io/projected/c8b527e5-cd18-4b81-aa80-0c7004486286-kube-api-access-nqr46\") pod \"package-server-manager-789f6589d5-mjbr2\" (UID: \"c8b527e5-cd18-4b81-aa80-0c7004486286\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971555 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkr5c\" (UniqueName: \"kubernetes.io/projected/86d71fb0-ec2b-4606-bb01-0e88f42b572b-kube-api-access-fkr5c\") pod \"migrator-59844c95c7-rvmt6\" (UID: \"86d71fb0-ec2b-4606-bb01-0e88f42b572b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971583 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a-serving-cert\") pod 
\"openshift-kube-scheduler-operator-5fdd9b5758-wqn7l\" (UID: \"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971608 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9582e4b-c48a-470f-a0f7-9cbad68d972f-serving-cert\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971633 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8dfb6055-bb8a-4b03-88cb-0ec84a2b1490-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-m9rf5\" (UID: \"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971659 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jwnb8\" (UniqueName: \"kubernetes.io/projected/c18256b2-4287-4df0-8819-201cf14c6380-kube-api-access-jwnb8\") pod \"collect-profiles-29415900-2lmxf\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971696 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-22ncf\" (UID: \"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971710 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cf24b057-f212-46d9-a3f8-0ef08669940d-metrics-tls\") pod \"dns-default-xhb8z\" (UID: \"cf24b057-f212-46d9-a3f8-0ef08669940d\") " pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971789 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c9582e4b-c48a-470f-a0f7-9cbad68d972f-config\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971802 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb0b91f2-5d2a-4bd3-92da-6573800548ff-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tfkgs\" (UID: \"bb0b91f2-5d2a-4bd3-92da-6573800548ff\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971918 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc777\" (UniqueName: \"kubernetes.io/projected/6b4d13e0-6218-444d-822b-656abbf0b80c-kube-api-access-fc777\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971953 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcnps\" (UniqueName: \"kubernetes.io/projected/1b6a5e8c-2aea-47e2-802c-8604814dbf18-kube-api-access-zcnps\") pod \"machine-config-server-4ctlc\" (UID: \"1b6a5e8c-2aea-47e2-802c-8604814dbf18\") " pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.971989 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4bad5655-05a5-4081-868b-3d2e69df620f-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-wk9v7\" (UID: \"4bad5655-05a5-4081-868b-3d2e69df620f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972274 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972305 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6b4d13e0-6218-444d-822b-656abbf0b80c-images\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972311 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-registration-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972372 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcjns\" (UniqueName: \"kubernetes.io/projected/c81db375-2956-49d8-856b-90395544e758-kube-api-access-fcjns\") pod \"multus-admission-controller-857f4d67dd-tqddg\" (UID: \"c81db375-2956-49d8-856b-90395544e758\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972419 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18256b2-4287-4df0-8819-201cf14c6380-config-volume\") pod \"collect-profiles-29415900-2lmxf\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972442 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81db375-2956-49d8-856b-90395544e758-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tqddg\" (UID: \"c81db375-2956-49d8-856b-90395544e758\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 
17:06:39.972459 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c9582e4b-c48a-470f-a0f7-9cbad68d972f-etcd-service-ca\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972483 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f69r6\" (UniqueName: \"kubernetes.io/projected/660abb91-a2d0-4cf3-b0d0-073b1cccdf37-kube-api-access-f69r6\") pod \"catalog-operator-68c6474976-flncn\" (UID: \"660abb91-a2d0-4cf3-b0d0-073b1cccdf37\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972522 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l2phx\" (UniqueName: \"kubernetes.io/projected/667c9562-fe34-4ad9-9150-43613978c0d8-kube-api-access-l2phx\") pod \"service-ca-9c57cc56f-hq6h7\" (UID: \"667c9562-fe34-4ad9-9150-43613978c0d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972555 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b-proxy-tls\") pod \"machine-config-controller-84d6567774-22ncf\" (UID: \"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972596 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvmmb\" (UniqueName: \"kubernetes.io/projected/bb0b91f2-5d2a-4bd3-92da-6573800548ff-kube-api-access-wvmmb\") pod \"openshift-controller-manager-operator-756b6f6bc6-tfkgs\" (UID: \"bb0b91f2-5d2a-4bd3-92da-6573800548ff\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972622 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgb84\" (UniqueName: \"kubernetes.io/projected/d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b-kube-api-access-tgb84\") pod \"machine-config-controller-84d6567774-22ncf\" (UID: \"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972643 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbcd15e6-1457-4722-b319-13651ca7598b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-69bc9\" (UID: \"cbcd15e6-1457-4722-b319-13651ca7598b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972666 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/660abb91-a2d0-4cf3-b0d0-073b1cccdf37-profile-collector-cert\") pod \"catalog-operator-68c6474976-flncn\" (UID: \"660abb91-a2d0-4cf3-b0d0-073b1cccdf37\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972701 
4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-service-ca-bundle\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972749 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1b6a5e8c-2aea-47e2-802c-8604814dbf18-node-bootstrap-token\") pod \"machine-config-server-4ctlc\" (UID: \"1b6a5e8c-2aea-47e2-802c-8604814dbf18\") " pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972791 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/27067136-5ba5-407b-a4a5-4d1e8c284564-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-pcb9b\" (UID: \"27067136-5ba5-407b-a4a5-4d1e8c284564\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972832 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/783086d1-b0d9-4f35-b3d1-95cd8df517aa-webhook-cert\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972858 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5881adf5-7728-46d0-b449-b0d8b0d77c7d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-w58zz\" (UID: \"5881adf5-7728-46d0-b449-b0d8b0d77c7d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972877 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s87ds\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972896 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqn7l\" (UID: \"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972927 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rg5lz\" (UniqueName: \"kubernetes.io/projected/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-kube-api-access-rg5lz\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972948 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"srv-cert\" (UniqueName: \"kubernetes.io/secret/5881adf5-7728-46d0-b449-b0d8b0d77c7d-srv-cert\") pod \"olm-operator-6b444d44fb-w58zz\" (UID: \"5881adf5-7728-46d0-b449-b0d8b0d77c7d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972970 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-csi-data-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.972988 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hd4x2\" (UniqueName: \"kubernetes.io/projected/cbcd15e6-1457-4722-b319-13651ca7598b-kube-api-access-hd4x2\") pod \"kube-storage-version-migrator-operator-b67b599dd-69bc9\" (UID: \"cbcd15e6-1457-4722-b319-13651ca7598b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.973316 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/c9582e4b-c48a-470f-a0f7-9cbad68d972f-etcd-ca\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.973646 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/783086d1-b0d9-4f35-b3d1-95cd8df517aa-tmpfs\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.973683 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6b4d13e0-6218-444d-822b-656abbf0b80c-auth-proxy-config\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.974575 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18256b2-4287-4df0-8819-201cf14c6380-secret-volume\") pod \"collect-profiles-29415900-2lmxf\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.975006 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-service-ca-bundle\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.976057 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18256b2-4287-4df0-8819-201cf14c6380-config-volume\") pod \"collect-profiles-29415900-2lmxf\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.976332 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-s87ds\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.976837 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/c9582e4b-c48a-470f-a0f7-9cbad68d972f-etcd-service-ca\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.977299 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e87071bd-a323-4f69-89a9-7c2a37bb27a4-serving-cert\") pod \"service-ca-operator-777779d784-hqm52\" (UID: \"e87071bd-a323-4f69-89a9-7c2a37bb27a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.977679 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/24e4585c-4850-4101-a40d-b38424860805-csi-data-dir\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.977912 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6b4d13e0-6218-444d-822b-656abbf0b80c-images\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.977927 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-stats-auth\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: E1205 17:06:39.978479 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.478446843 +0000 UTC m=+138.981554029 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.978559 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-s87ds\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.980022 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/92cd61ad-2356-4854-ad74-bde03a83abf0-metrics-tls\") pod \"dns-operator-744455d44c-hzrmq\" (UID: \"92cd61ad-2356-4854-ad74-bde03a83abf0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.980503 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8dfb6055-bb8a-4b03-88cb-0ec84a2b1490-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-m9rf5\" (UID: \"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.980527 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/c9582e4b-c48a-470f-a0f7-9cbad68d972f-etcd-client\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.980941 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6b4d13e0-6218-444d-822b-656abbf0b80c-proxy-tls\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.981439 4753 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.981553 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5881adf5-7728-46d0-b449-b0d8b0d77c7d-srv-cert\") pod \"olm-operator-6b444d44fb-w58zz\" (UID: \"5881adf5-7728-46d0-b449-b0d8b0d77c7d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.982192 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqn7l\" (UID: \"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.982437 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c81db375-2956-49d8-856b-90395544e758-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-tqddg\" (UID: \"c81db375-2956-49d8-856b-90395544e758\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.982751 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b-proxy-tls\") pod \"machine-config-controller-84d6567774-22ncf\" (UID: \"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.982840 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/660abb91-a2d0-4cf3-b0d0-073b1cccdf37-srv-cert\") pod \"catalog-operator-68c6474976-flncn\" (UID: \"660abb91-a2d0-4cf3-b0d0-073b1cccdf37\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.983458 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-default-certificate\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.983558 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bb0b91f2-5d2a-4bd3-92da-6573800548ff-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tfkgs\" (UID: \"bb0b91f2-5d2a-4bd3-92da-6573800548ff\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.983864 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbcd15e6-1457-4722-b319-13651ca7598b-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-69bc9\" (UID: \"cbcd15e6-1457-4722-b319-13651ca7598b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.984100 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/27067136-5ba5-407b-a4a5-4d1e8c284564-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-pcb9b\" (UID: \"27067136-5ba5-407b-a4a5-4d1e8c284564\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.984132 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5881adf5-7728-46d0-b449-b0d8b0d77c7d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-w58zz\" (UID: \"5881adf5-7728-46d0-b449-b0d8b0d77c7d\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.985185 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4bad5655-05a5-4081-868b-3d2e69df620f-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-wk9v7\" (UID: \"4bad5655-05a5-4081-868b-3d2e69df620f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.985250 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/667c9562-fe34-4ad9-9150-43613978c0d8-signing-key\") pod \"service-ca-9c57cc56f-hq6h7\" (UID: \"667c9562-fe34-4ad9-9150-43613978c0d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.986890 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/783086d1-b0d9-4f35-b3d1-95cd8df517aa-webhook-cert\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.987080 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/660abb91-a2d0-4cf3-b0d0-073b1cccdf37-profile-collector-cert\") pod \"catalog-operator-68c6474976-flncn\" (UID: \"660abb91-a2d0-4cf3-b0d0-073b1cccdf37\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.987106 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/783086d1-b0d9-4f35-b3d1-95cd8df517aa-apiservice-cert\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.988336 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c9582e4b-c48a-470f-a0f7-9cbad68d972f-serving-cert\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:39 crc kubenswrapper[4753]: I1205 17:06:39.995091 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-metrics-certs\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.002743 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.015677 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/cf24b057-f212-46d9-a3f8-0ef08669940d-metrics-tls\") pod \"dns-default-xhb8z\" (UID: \"cf24b057-f212-46d9-a3f8-0ef08669940d\") " pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.021855 4753 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.027411 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cf24b057-f212-46d9-a3f8-0ef08669940d-config-volume\") pod \"dns-default-xhb8z\" (UID: \"cf24b057-f212-46d9-a3f8-0ef08669940d\") " pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.043810 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.061651 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.074562 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.074735 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.574709752 +0000 UTC m=+139.077816758 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.074893 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.075685 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.57565665 +0000 UTC m=+139.078763866 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.081488 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.089031 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/1b6a5e8c-2aea-47e2-802c-8604814dbf18-node-bootstrap-token\") pod \"machine-config-server-4ctlc\" (UID: \"1b6a5e8c-2aea-47e2-802c-8604814dbf18\") " pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.101220 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.112081 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/1b6a5e8c-2aea-47e2-802c-8604814dbf18-certs\") pod \"machine-config-server-4ctlc\" (UID: \"1b6a5e8c-2aea-47e2-802c-8604814dbf18\") " pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.137402 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6mtt\" (UniqueName: \"kubernetes.io/projected/1e11d89c-e71a-4a17-b1dd-da3883753fde-kube-api-access-k6mtt\") pod \"console-operator-58897d9998-64nzp\" (UID: \"1e11d89c-e71a-4a17-b1dd-da3883753fde\") " pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.162726 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p49w\" (UniqueName: \"kubernetes.io/projected/0aa048c9-05f2-41f3-ad26-c88d5c98c453-kube-api-access-4p49w\") pod \"route-controller-manager-6576b87f9c-7zfzd\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.176029 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.176296 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.676248506 +0000 UTC m=+139.179355522 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.176472 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.176815 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.676796632 +0000 UTC m=+139.179903828 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.184400 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48llr\" (UniqueName: \"kubernetes.io/projected/915c88c7-cac2-48b3-ab7a-6e23e7240465-kube-api-access-48llr\") pod \"controller-manager-879f6c89f-b7687\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") " pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.195421 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/72117afc-2e5f-4696-b515-76aced63c30f-bound-sa-token\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.215560 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gxf2\" (UniqueName: \"kubernetes.io/projected/bcea690d-529b-4175-b5fd-a1a07970cf0d-kube-api-access-4gxf2\") pod \"openshift-config-operator-7777fb866f-4gllz\" (UID: \"bcea690d-529b-4175-b5fd-a1a07970cf0d\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.234807 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-97z8z\" (UniqueName: \"kubernetes.io/projected/73058959-ba27-4d94-8662-a213a53113e9-kube-api-access-97z8z\") pod \"machine-approver-56656f9798-44d5b\" (UID: \"73058959-ba27-4d94-8662-a213a53113e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.255305 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-bz6l4\" (UniqueName: \"kubernetes.io/projected/12c0356d-b61a-47e3-a93d-8d2d743cc9b5-kube-api-access-bz6l4\") pod \"authentication-operator-69f744f599-rnr95\" (UID: \"12c0356d-b61a-47e3-a93d-8d2d743cc9b5\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.259816 4753 request.go:700] Waited for 1.935702378s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift/token Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.259854 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.275596 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2kb8\" (UniqueName: \"kubernetes.io/projected/c3117f4b-6f3a-4131-9001-d39222e6f268-kube-api-access-k2kb8\") pod \"oauth-openshift-558db77b4-kzdfh\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") " pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:40 crc kubenswrapper[4753]: W1205 17:06:40.278007 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod73058959_ba27_4d94_8662_a213a53113e9.slice/crio-fc7e84ce63d43369bce41b5c1dc741d560953e52e48451c63ce2185e98fded43 WatchSource:0}: Error finding container fc7e84ce63d43369bce41b5c1dc741d560953e52e48451c63ce2185e98fded43: Status 404 returned error can't find the container with id fc7e84ce63d43369bce41b5c1dc741d560953e52e48451c63ce2185e98fded43 Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.278024 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.278085 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.778070288 +0000 UTC m=+139.281177314 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.278651 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.279211 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.779190761 +0000 UTC m=+139.282297777 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.299659 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h74fl\" (UniqueName: \"kubernetes.io/projected/72117afc-2e5f-4696-b515-76aced63c30f-kube-api-access-h74fl\") pod \"ingress-operator-5b745b69d9-q2bmk\" (UID: \"72117afc-2e5f-4696-b515-76aced63c30f\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.315725 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.321062 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nwkn\" (UniqueName: \"kubernetes.io/projected/f119bafa-ed3a-42d2-876f-c63999b216e1-kube-api-access-6nwkn\") pod \"downloads-7954f5f757-2nb5t\" (UID: \"f119bafa-ed3a-42d2-876f-c63999b216e1\") " pod="openshift-console/downloads-7954f5f757-2nb5t" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.324111 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.337865 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82rp2\" (UniqueName: \"kubernetes.io/projected/5cd4f96b-673e-4518-a8ee-da3ccb7a86b0-kube-api-access-82rp2\") pod \"machine-api-operator-5694c8668f-vx9s2\" (UID: \"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.341391 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-2nb5t" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.357848 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/002aa1a9-0253-4b17-8c8c-d23c830c46cc-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jwgnx\" (UID: \"002aa1a9-0253-4b17-8c8c-d23c830c46cc\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.359919 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.376332 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjf54\" (UniqueName: \"kubernetes.io/projected/3d179b2f-6775-4d3a-be36-3960799428db-kube-api-access-rjf54\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.380307 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.380474 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.880449087 +0000 UTC m=+139.383556093 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.381909 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.383126 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.883085755 +0000 UTC m=+139.386192761 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.388224 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.392644 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.398209 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3d179b2f-6775-4d3a-be36-3960799428db-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nhbkn\" (UID: \"3d179b2f-6775-4d3a-be36-3960799428db\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.404406 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.412749 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.418878 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlmh2\" (UniqueName: \"kubernetes.io/projected/40a88e42-a0d7-4703-a3f8-25f524f90eca-kube-api-access-qlmh2\") pod \"apiserver-7bbb656c7d-zdzdl\" (UID: \"40a88e42-a0d7-4703-a3f8-25f524f90eca\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.443247 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g7jlp\" (UniqueName: \"kubernetes.io/projected/0f4f139a-e961-4510-9a82-a2de30587b6f-kube-api-access-g7jlp\") pod \"openshift-apiserver-operator-796bbdcf4f-k49xj\" (UID: \"0f4f139a-e961-4510-9a82-a2de30587b6f\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.461919 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jd75\" (UniqueName: \"kubernetes.io/projected/782feb3b-5fe4-413e-87a0-9602f412897e-kube-api-access-2jd75\") pod \"apiserver-76f77b778f-rkmxh\" (UID: \"782feb3b-5fe4-413e-87a0-9602f412897e\") " pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.464536 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.475580 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.477030 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psmlg\" (UniqueName: \"kubernetes.io/projected/48038042-7b0f-48d9-9f90-6c0b9dd179d6-kube-api-access-psmlg\") pod \"console-f9d7485db-9c622\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.483352 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.483537 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.983508296 +0000 UTC m=+139.486615312 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.483940 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.484342 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:40.984330131 +0000 UTC m=+139.487437127 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.522354 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s92jj\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-kube-api-access-s92jj\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.540950 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-rnr95"] Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.542131 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-bound-sa-token\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.546184 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:40 crc kubenswrapper[4753]: W1205 17:06:40.551566 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod12c0356d_b61a_47e3_a93d_8d2d743cc9b5.slice/crio-0cdbb546f06ee9e074d18b25b1567f8511b4aa363621e1ad5b3b28401097587d WatchSource:0}: Error finding container 0cdbb546f06ee9e074d18b25b1567f8511b4aa363621e1ad5b3b28401097587d: Status 404 returned error can't find the container with id 0cdbb546f06ee9e074d18b25b1567f8511b4aa363621e1ad5b3b28401097587d Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.562659 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6stz8\" (UniqueName: \"kubernetes.io/projected/5881adf5-7728-46d0-b449-b0d8b0d77c7d-kube-api-access-6stz8\") pod \"olm-operator-6b444d44fb-w58zz\" (UID: \"5881adf5-7728-46d0-b449-b0d8b0d77c7d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.574286 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.579764 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znzwq\" (UniqueName: \"kubernetes.io/projected/07ba614e-886f-423d-8ddf-5b727931f58b-kube-api-access-znzwq\") pod \"ingress-canary-wxsmf\" (UID: \"07ba614e-886f-423d-8ddf-5b727931f58b\") " pod="openshift-ingress-canary/ingress-canary-wxsmf" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.585757 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.586084 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.08604092 +0000 UTC m=+139.589147976 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.586293 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.586714 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.086703609 +0000 UTC m=+139.589810605 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.598943 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rqfc\" (UniqueName: \"kubernetes.io/projected/e87071bd-a323-4f69-89a9-7c2a37bb27a4-kube-api-access-6rqfc\") pod \"service-ca-operator-777779d784-hqm52\" (UID: \"e87071bd-a323-4f69-89a9-7c2a37bb27a4\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.611648 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-wxsmf" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.618891 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9vg6\" (UniqueName: \"kubernetes.io/projected/92cd61ad-2356-4854-ad74-bde03a83abf0-kube-api-access-x9vg6\") pod \"dns-operator-744455d44c-hzrmq\" (UID: \"92cd61ad-2356-4854-ad74-bde03a83abf0\") " pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.633928 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.637899 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndp9z\" (UniqueName: \"kubernetes.io/projected/cf24b057-f212-46d9-a3f8-0ef08669940d-kube-api-access-ndp9z\") pod \"dns-default-xhb8z\" (UID: \"cf24b057-f212-46d9-a3f8-0ef08669940d\") " pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.640751 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.668795 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x92ch\" (UniqueName: \"kubernetes.io/projected/4bad5655-05a5-4081-868b-3d2e69df620f-kube-api-access-x92ch\") pod \"cluster-samples-operator-665b6dd947-wk9v7\" (UID: \"4bad5655-05a5-4081-868b-3d2e69df620f\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.673618 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.679135 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.682315 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5r9xp\" (UniqueName: \"kubernetes.io/projected/c9582e4b-c48a-470f-a0f7-9cbad68d972f-kube-api-access-5r9xp\") pod \"etcd-operator-b45778765-lcd59\" (UID: \"c9582e4b-c48a-470f-a0f7-9cbad68d972f\") " pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.688018 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.688662 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.188641895 +0000 UTC m=+139.691748911 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.698530 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs7sx\" (UniqueName: \"kubernetes.io/projected/24e4585c-4850-4101-a40d-b38424860805-kube-api-access-qs7sx\") pod \"csi-hostpathplugin-djddj\" (UID: \"24e4585c-4850-4101-a40d-b38424860805\") " pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.769289 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rphmt\" (UniqueName: \"kubernetes.io/projected/03fd3f93-fb41-4e32-8276-416c40f2b9a7-kube-api-access-rphmt\") pod \"marketplace-operator-79b997595-s87ds\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.785016 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"] Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.787292 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.790327 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.790745 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.290731716 +0000 UTC m=+139.793838722 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.794068 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.801693 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.814790 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hd4x2\" (UniqueName: \"kubernetes.io/projected/cbcd15e6-1457-4722-b319-13651ca7598b-kube-api-access-hd4x2\") pod \"kube-storage-version-migrator-operator-b67b599dd-69bc9\" (UID: \"cbcd15e6-1457-4722-b319-13651ca7598b\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.823717 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc777\" (UniqueName: \"kubernetes.io/projected/6b4d13e0-6218-444d-822b-656abbf0b80c-kube-api-access-fc777\") pod \"machine-config-operator-74547568cd-55drw\" (UID: \"6b4d13e0-6218-444d-822b-656abbf0b80c\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.824055 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcnps\" (UniqueName: \"kubernetes.io/projected/1b6a5e8c-2aea-47e2-802c-8604814dbf18-kube-api-access-zcnps\") pod \"machine-config-server-4ctlc\" (UID: \"1b6a5e8c-2aea-47e2-802c-8604814dbf18\") " pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.829617 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.836961 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqr46\" (UniqueName: \"kubernetes.io/projected/c8b527e5-cd18-4b81-aa80-0c7004486286-kube-api-access-nqr46\") pod \"package-server-manager-789f6589d5-mjbr2\" (UID: \"c8b527e5-cd18-4b81-aa80-0c7004486286\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.838586 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.861880 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.868928 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkr5c\" (UniqueName: \"kubernetes.io/projected/86d71fb0-ec2b-4606-bb01-0e88f42b572b-kube-api-access-fkr5c\") pod \"migrator-59844c95c7-rvmt6\" (UID: \"86d71fb0-ec2b-4606-bb01-0e88f42b572b\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.876866 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.890673 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rg5lz\" (UniqueName: \"kubernetes.io/projected/de65df6c-3a9a-4041-a67c-e6cbd766b4b2-kube-api-access-rg5lz\") pod \"router-default-5444994796-9tzjp\" (UID: \"de65df6c-3a9a-4041-a67c-e6cbd766b4b2\") " pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.891511 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.892083 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.392061574 +0000 UTC m=+139.895168590 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.896360 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.912391 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.917297 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-wqn7l\" (UID: \"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.931657 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-djddj" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.932957 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f69r6\" (UniqueName: \"kubernetes.io/projected/660abb91-a2d0-4cf3-b0d0-073b1cccdf37-kube-api-access-f69r6\") pod \"catalog-operator-68c6474976-flncn\" (UID: \"660abb91-a2d0-4cf3-b0d0-073b1cccdf37\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.947813 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-4ctlc" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.948416 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2nb5t"] Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.950523 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-4gllz"] Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.971543 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-b7687"] Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.974243 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l2phx\" (UniqueName: \"kubernetes.io/projected/667c9562-fe34-4ad9-9150-43613978c0d8-kube-api-access-l2phx\") pod \"service-ca-9c57cc56f-hq6h7\" (UID: \"667c9562-fe34-4ad9-9150-43613978c0d8\") " pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.986371 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvmmb\" (UniqueName: \"kubernetes.io/projected/bb0b91f2-5d2a-4bd3-92da-6573800548ff-kube-api-access-wvmmb\") pod \"openshift-controller-manager-operator-756b6f6bc6-tfkgs\" (UID: \"bb0b91f2-5d2a-4bd3-92da-6573800548ff\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.993641 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:40 crc kubenswrapper[4753]: E1205 17:06:40.994102 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.494083932 +0000 UTC m=+139.997190978 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.997452 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgb84\" (UniqueName: \"kubernetes.io/projected/d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b-kube-api-access-tgb84\") pod \"machine-config-controller-84d6567774-22ncf\" (UID: \"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:40 crc kubenswrapper[4753]: I1205 17:06:40.998657 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" event={"ID":"73058959-ba27-4d94-8662-a213a53113e9","Type":"ContainerStarted","Data":"fc7e84ce63d43369bce41b5c1dc741d560953e52e48451c63ce2185e98fded43"} Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.000073 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" event={"ID":"12c0356d-b61a-47e3-a93d-8d2d743cc9b5","Type":"ContainerStarted","Data":"0cdbb546f06ee9e074d18b25b1567f8511b4aa363621e1ad5b3b28401097587d"} Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.000950 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" event={"ID":"0aa048c9-05f2-41f3-ad26-c88d5c98c453","Type":"ContainerStarted","Data":"9bc21e751dd52dc6dac679d64edd3ad30539759661f044ac1f8dc69a3d3ccb75"} Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.009435 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8dfb6055-bb8a-4b03-88cb-0ec84a2b1490-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-m9rf5\" (UID: \"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.021042 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.023785 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-64nzp"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.045676 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kzdfh"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.046833 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.048122 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.070840 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6" Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.076057 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.079428 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.096650 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.097044 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.597008937 +0000 UTC m=+140.100115943 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.097218 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.097736 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.597717118 +0000 UTC m=+140.100824124 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.114328 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.144082 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.152104 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.168046 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.171265 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.201757 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.208367 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.70833194 +0000 UTC m=+140.211439096 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.208686 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.209395 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.709383891 +0000 UTC m=+140.212490897 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.224868 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.225646 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-vx9s2"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.229184 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-rkmxh"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.243224 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-wxsmf"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.275825 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.310101 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.310550 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:41.810520513 +0000 UTC m=+140.313627519 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.353745 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz"] Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.414315 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.414691 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.415823 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj"]
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.434179 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-xhb8z"]
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.435338 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-9c622"]
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.506175 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-lcd59"]
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.515846 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.516226 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.016207347 +0000 UTC m=+140.519314353 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.517180 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-hzrmq"]
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.532826 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-hqm52"]
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.618299 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.618783 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.118762641 +0000 UTC m=+140.621869647 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.719838 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.720002 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.219967645 +0000 UTC m=+140.723074671 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.720067 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.720973 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.220961185 +0000 UTC m=+140.724068201 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.821255 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.821475 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.321446848 +0000 UTC m=+140.824553854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.821679 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.822212 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.32219993 +0000 UTC m=+140.825307156 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.916528 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sgj8\" (UniqueName: \"kubernetes.io/projected/783086d1-b0d9-4f35-b3d1-95cd8df517aa-kube-api-access-4sgj8\") pod \"packageserver-d55dfcdfc-9j8t7\" (UID: \"783086d1-b0d9-4f35-b3d1-95cd8df517aa\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7"
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.920779 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l4dt\" (UniqueName: \"kubernetes.io/projected/27067136-5ba5-407b-a4a5-4d1e8c284564-kube-api-access-4l4dt\") pod \"control-plane-machine-set-operator-78cbb6b69f-pcb9b\" (UID: \"27067136-5ba5-407b-a4a5-4d1e8c284564\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b"
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.922687 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcjns\" (UniqueName: \"kubernetes.io/projected/c81db375-2956-49d8-856b-90395544e758-kube-api-access-fcjns\") pod \"multus-admission-controller-857f4d67dd-tqddg\" (UID: \"c81db375-2956-49d8-856b-90395544e758\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg"
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.923413 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.923566 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.423547968 +0000 UTC m=+140.926654974 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.923709 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:41 crc kubenswrapper[4753]: E1205 17:06:41.924405 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.424393573 +0000 UTC m=+140.927500579 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:41 crc kubenswrapper[4753]: I1205 17:06:41.927538 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jwnb8\" (UniqueName: \"kubernetes.io/projected/c18256b2-4287-4df0-8819-201cf14c6380-kube-api-access-jwnb8\") pod \"collect-profiles-29415900-2lmxf\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf"
Dec 05 17:06:41 crc kubenswrapper[4753]: W1205 17:06:41.954221 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcea690d_529b_4175_b5fd_a1a07970cf0d.slice/crio-1734956136ea5b0d712392ebb4a923e913543c59eeb974bb1dc37a5536c9dae2 WatchSource:0}: Error finding container 1734956136ea5b0d712392ebb4a923e913543c59eeb974bb1dc37a5536c9dae2: Status 404 returned error can't find the container with id 1734956136ea5b0d712392ebb4a923e913543c59eeb974bb1dc37a5536c9dae2
Dec 05 17:06:41 crc kubenswrapper[4753]: W1205 17:06:41.963692 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod915c88c7_cac2_48b3_ab7a_6e23e7240465.slice/crio-11305d7e0499230bfc12453a58ac34d14e8072c0f05aa34ab17d0874546357dd WatchSource:0}: Error finding container 11305d7e0499230bfc12453a58ac34d14e8072c0f05aa34ab17d0874546357dd: Status 404 returned error can't find the container with id 11305d7e0499230bfc12453a58ac34d14e8072c0f05aa34ab17d0874546357dd
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.007200 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b"
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.012404 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" event={"ID":"c3117f4b-6f3a-4131-9001-d39222e6f268","Type":"ContainerStarted","Data":"89cda6ec6470579314e60d6ab523d19e143eada5e55a3d46fab7c9ba48883d40"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.014347 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" event={"ID":"915c88c7-cac2-48b3-ab7a-6e23e7240465","Type":"ContainerStarted","Data":"11305d7e0499230bfc12453a58ac34d14e8072c0f05aa34ab17d0874546357dd"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.022243 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg"
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.023257 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" event={"ID":"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0","Type":"ContainerStarted","Data":"48edd8c5b0854030b090aff4a0d3be681357c6d452ba524160818fe3a3b408cc"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.025884 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.026359 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.526311869 +0000 UTC m=+141.029418895 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.026971 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" event={"ID":"72117afc-2e5f-4696-b515-76aced63c30f","Type":"ContainerStarted","Data":"764eb6427301bfaa0f4801c06496be49a2d80f0d5ba3df2aaa04de1f0e3a2f77"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.027054 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9"
Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.028094 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.52806956 +0000 UTC m=+141.031176736 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.030250 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-wxsmf" event={"ID":"07ba614e-886f-423d-8ddf-5b727931f58b","Type":"ContainerStarted","Data":"b5731b13d315721428442eee58e30216c3e276da794954157360a4578081c948"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.035674 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj" event={"ID":"0f4f139a-e961-4510-9a82-a2de30587b6f","Type":"ContainerStarted","Data":"fca0aa46f593a29dade64e3a04a5c0e1173d64e135fd68466938bd12cc24cd48"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.038886 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" event={"ID":"73058959-ba27-4d94-8662-a213a53113e9","Type":"ContainerStarted","Data":"e8d690831ba3887ddeaefc28500e3a7f46d372faa346ec00fc60e4f0fffea582"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.041355 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xhb8z" event={"ID":"cf24b057-f212-46d9-a3f8-0ef08669940d","Type":"ContainerStarted","Data":"c55b5d3963771928deb27543983c6b48c0ddd234fcce305cccc44fa48e274365"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.042616 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" event={"ID":"782feb3b-5fe4-413e-87a0-9602f412897e","Type":"ContainerStarted","Data":"3de878be23950c7c50144af028684590098328c87d7e2f116df994208ed5d7c5"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.049585 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" event={"ID":"bcea690d-529b-4175-b5fd-a1a07970cf0d","Type":"ContainerStarted","Data":"1734956136ea5b0d712392ebb4a923e913543c59eeb974bb1dc37a5536c9dae2"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.058594 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx" event={"ID":"002aa1a9-0253-4b17-8c8c-d23c830c46cc","Type":"ContainerStarted","Data":"1087c3693bb8d526f6ad4a1649d0102172735dc093a4866116ce3266635de299"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.060502 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" event={"ID":"3d179b2f-6775-4d3a-be36-3960799428db","Type":"ContainerStarted","Data":"db028401def4b74c9eb68455e692fb0db9c26a728aed1fb8399974601b57b181"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.082595 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-9c622" event={"ID":"48038042-7b0f-48d9-9f90-6c0b9dd179d6","Type":"ContainerStarted","Data":"600a4a2dec294e252cfc6e7db3796b7533eddfe0cf4eda204d3f2bc5de668196"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.085799 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf"
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.086098 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" event={"ID":"c9582e4b-c48a-470f-a0f7-9cbad68d972f","Type":"ContainerStarted","Data":"0f6e0f05e28ef5159b18cff848a53459d80d650d7503d105d132abeb830a77f7"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.087406 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" event={"ID":"40a88e42-a0d7-4703-a3f8-25f524f90eca","Type":"ContainerStarted","Data":"b9bf6d52dd5fca42c06a8dff74aed2b431e883c2fd433bc12b37fec10cd2d0b2"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.094865 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" event={"ID":"92cd61ad-2356-4854-ad74-bde03a83abf0","Type":"ContainerStarted","Data":"4a88c1d95265ce4b54360d060ed39bb9d076c2a809d7d46a2d014599af14aab1"}
Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.106007 4753 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.115520 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-64nzp" event={"ID":"1e11d89c-e71a-4a17-b1dd-da3883753fde","Type":"ContainerStarted","Data":"63dd5fb1afbf76b2892502fdd57d2d689279f21aebd41da19300251137b99a44"} Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.120130 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2nb5t" event={"ID":"f119bafa-ed3a-42d2-876f-c63999b216e1","Type":"ContainerStarted","Data":"8afaf04e66a40aed6b8977eca8083aa7c78a0f27509493c8b272bb550d007167"} Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.124063 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" event={"ID":"12c0356d-b61a-47e3-a93d-8d2d743cc9b5","Type":"ContainerStarted","Data":"773fd92d8df050e2003dcd61ce943699e31ad22ecbecb6e7f95b6bd5890d7a3e"} Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.128319 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.132231 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.63217503 +0000 UTC m=+141.135282036 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.132532 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.133056 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.633047076 +0000 UTC m=+141.136154082 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.144363 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" event={"ID":"5881adf5-7728-46d0-b449-b0d8b0d77c7d","Type":"ContainerStarted","Data":"514bab0ad399c6e9c354fb4a9a15a97e9c139b0570733f384a6c3e8fe8d58b18"} Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.150471 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" event={"ID":"e87071bd-a323-4f69-89a9-7c2a37bb27a4","Type":"ContainerStarted","Data":"7cd98170e099df83f1759668d046fc9aeaa6e4046f6ea52b8334dc954bd08917"} Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.233318 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.233758 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.733716434 +0000 UTC m=+141.236823430 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.233885 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.234364 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.734352593 +0000 UTC m=+141.237459769 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.241447 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-55drw"] Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.272569 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9"] Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.310078 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2"] Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.334791 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.334969 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.834933559 +0000 UTC m=+141.338040565 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.335221 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.335648 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.83563683 +0000 UTC m=+141.338744016 (durationBeforeRetry 500ms). 
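[Editor's note] Each nestedpendingoperations.go:348 entry gates the failed operation behind a retry window: "No retries permitted until <timestamp> (durationBeforeRetry 500ms)", where m=+... is the offset on kubelet's monotonic clock. Below is a generic sketch of such a gate, assuming an exponential backoff starting at 500ms and capped at a maximum; the names and constants are illustrative, not kubelet's (the log above happens to show a constant 500ms window between attempts).

// Illustrative retry gate: a failing operation records when it may next
// run; callers check mayRetry before reattempting.
package main

import (
	"fmt"
	"time"
)

type pendingOp struct {
	lastError time.Time
	backoff   time.Duration
}

const (
	initialBackoff = 500 * time.Millisecond // assumed initial window
	maxBackoff     = 2 * time.Minute        // assumed cap
)

func (op *pendingOp) fail(now time.Time) {
	if op.backoff == 0 {
		op.backoff = initialBackoff
	} else if op.backoff < maxBackoff {
		op.backoff *= 2
	}
	op.lastError = now
}

func (op *pendingOp) mayRetry(now time.Time) bool {
	return now.After(op.lastError.Add(op.backoff))
}

func main() {
	op := &pendingOp{}
	now := time.Now()
	op.fail(now)
	fmt.Printf("no retries permitted until %s (durationBeforeRetry %s)\n",
		now.Add(op.backoff).Format(time.RFC3339Nano), op.backoff)
}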
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.393823 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-djddj"] Dec 05 17:06:42 crc kubenswrapper[4753]: W1205 17:06:42.408202 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b4d13e0_6218_444d_822b_656abbf0b80c.slice/crio-025cd491704217c724a80834af503a156b71bae2bbb0a55acb6859d556dc71c8 WatchSource:0}: Error finding container 025cd491704217c724a80834af503a156b71bae2bbb0a55acb6859d556dc71c8: Status 404 returned error can't find the container with id 025cd491704217c724a80834af503a156b71bae2bbb0a55acb6859d556dc71c8 Dec 05 17:06:42 crc kubenswrapper[4753]: W1205 17:06:42.427111 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcbcd15e6_1457_4722_b319_13651ca7598b.slice/crio-a919f704aafa7512e41eae217dc8a8c4aa3ce2871440e86c278b81cff27f1e04 WatchSource:0}: Error finding container a919f704aafa7512e41eae217dc8a8c4aa3ce2871440e86c278b81cff27f1e04: Status 404 returned error can't find the container with id a919f704aafa7512e41eae217dc8a8c4aa3ce2871440e86c278b81cff27f1e04 Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.437366 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.437854 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:42.937830063 +0000 UTC m=+141.440937069 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: W1205 17:06:42.464617 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8b527e5_cd18_4b81_aa80_0c7004486286.slice/crio-f6d1e4240d51a55c74b108f5da915ccf1643040801e2ef538056e75f5b8a75ed WatchSource:0}: Error finding container f6d1e4240d51a55c74b108f5da915ccf1643040801e2ef538056e75f5b8a75ed: Status 404 returned error can't find the container with id f6d1e4240d51a55c74b108f5da915ccf1643040801e2ef538056e75f5b8a75ed Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.539080 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.539786 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.039768949 +0000 UTC m=+141.542875955 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.579123 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s87ds"] Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.604761 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b"] Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.640686 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.641197 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.141175359 +0000 UTC m=+141.644282365 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: W1205 17:06:42.676308 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24e4585c_4850_4101_a40d_b38424860805.slice/crio-3ab9306170963bb504d4d7a89b32b3798de6fdd130526fc98ed39f86fa1ede9e WatchSource:0}: Error finding container 3ab9306170963bb504d4d7a89b32b3798de6fdd130526fc98ed39f86fa1ede9e: Status 404 returned error can't find the container with id 3ab9306170963bb504d4d7a89b32b3798de6fdd130526fc98ed39f86fa1ede9e Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.751479 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.752236 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.252028728 +0000 UTC m=+141.755135734 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.784932 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-rnr95" podStartSLOduration=120.784908308 podStartE2EDuration="2m0.784908308s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:42.784073593 +0000 UTC m=+141.287180599" watchObservedRunningTime="2025-12-05 17:06:42.784908308 +0000 UTC m=+141.288015314" Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.856209 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.857120 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 17:06:43.357097696 +0000 UTC m=+141.860204702 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.949784 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf"] Dec 05 17:06:42 crc kubenswrapper[4753]: I1205 17:06:42.964086 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:42 crc kubenswrapper[4753]: E1205 17:06:42.964757 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.46473466 +0000 UTC m=+141.967841866 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:42 crc kubenswrapper[4753]: W1205 17:06:42.996004 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03fd3f93_fb41_4e32_8276_416c40f2b9a7.slice/crio-6f743b9240b7cd5a1999f9913cfc814045eed2770b705f12f3dc561423e322c8 WatchSource:0}: Error finding container 6f743b9240b7cd5a1999f9913cfc814045eed2770b705f12f3dc561423e322c8: Status 404 returned error can't find the container with id 6f743b9240b7cd5a1999f9913cfc814045eed2770b705f12f3dc561423e322c8 Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.072439 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 17:06:43.073514 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.573489877 +0000 UTC m=+142.076596883 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.136244 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hq6h7"] Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.148285 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs"] Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.175227 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 17:06:43.175597 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.675586078 +0000 UTC m=+142.178693084 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.209088 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" event={"ID":"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b","Type":"ContainerStarted","Data":"c81980886a640bec4d6cfd77ddc188139e70eb069d2ea5e129f839a30dc8af74"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.222510 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" event={"ID":"e87071bd-a323-4f69-89a9-7c2a37bb27a4","Type":"ContainerStarted","Data":"5dffb99cb0faf1a2c03f217aff55daeb461e6111e59ba66a5f6b67a1bb40ecd2"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.224502 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" event={"ID":"0aa048c9-05f2-41f3-ad26-c88d5c98c453","Type":"ContainerStarted","Data":"08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.224946 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.229716 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" event={"ID":"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0","Type":"ContainerStarted","Data":"f95931a4d1a6768055a4c8aa60404e2fc5eafc1865012ddbbd0a0cc55ee0c3c4"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.232419 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" event={"ID":"72117afc-2e5f-4696-b515-76aced63c30f","Type":"ContainerStarted","Data":"78334f59f1b3d3644aa3fd45d52f5532eb1501fb0a0c82b7a1a1aa62b8c69ff5"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.258109 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-wxsmf" event={"ID":"07ba614e-886f-423d-8ddf-5b727931f58b","Type":"ContainerStarted","Data":"b0151ce1efc0fd69f8e65e751b55cc204f3371de332836069b7dbb5cf431bcd6"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.279011 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 17:06:43.279895 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.779869913 +0000 UTC m=+142.282976919 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.280264 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" podStartSLOduration=120.280241074 podStartE2EDuration="2m0.280241074s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:43.244681955 +0000 UTC m=+141.747788951" watchObservedRunningTime="2025-12-05 17:06:43.280241074 +0000 UTC m=+141.783348070" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.286225 4753 generic.go:334] "Generic (PLEG): container finished" podID="bcea690d-529b-4175-b5fd-a1a07970cf0d" containerID="097b4869ee41406f8d7baa50068399bd7e960ad6ece8fd74f5bcde9b8e70230b" exitCode=0 Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.286333 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" event={"ID":"bcea690d-529b-4175-b5fd-a1a07970cf0d","Type":"ContainerDied","Data":"097b4869ee41406f8d7baa50068399bd7e960ad6ece8fd74f5bcde9b8e70230b"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.290941 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-djddj" 
event={"ID":"24e4585c-4850-4101-a40d-b38424860805","Type":"ContainerStarted","Data":"3ab9306170963bb504d4d7a89b32b3798de6fdd130526fc98ed39f86fa1ede9e"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.291184 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-wxsmf" podStartSLOduration=5.291156606 podStartE2EDuration="5.291156606s" podCreationTimestamp="2025-12-05 17:06:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:43.279698368 +0000 UTC m=+141.782805374" watchObservedRunningTime="2025-12-05 17:06:43.291156606 +0000 UTC m=+141.794263622" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.295881 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" event={"ID":"c8b527e5-cd18-4b81-aa80-0c7004486286","Type":"ContainerStarted","Data":"f6d1e4240d51a55c74b108f5da915ccf1643040801e2ef538056e75f5b8a75ed"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.304870 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn"] Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.339071 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" event={"ID":"cbcd15e6-1457-4722-b319-13651ca7598b","Type":"ContainerStarted","Data":"a919f704aafa7512e41eae217dc8a8c4aa3ce2871440e86c278b81cff27f1e04"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.342816 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" event={"ID":"4bad5655-05a5-4081-868b-3d2e69df620f","Type":"ContainerStarted","Data":"17099b215b68a3bcc19884aa0308e15ab9f5ac098b8bc15f941a70584c7d1988"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.342859 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" event={"ID":"4bad5655-05a5-4081-868b-3d2e69df620f","Type":"ContainerStarted","Data":"4b4dcbc915651d891bf978719d1f744b06a4d65f59bb2ed44ef3046de75e8b62"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.347310 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b" event={"ID":"27067136-5ba5-407b-a4a5-4d1e8c284564","Type":"ContainerStarted","Data":"5bb507ed86f09ca3a96947b7b9de8d4b3fded4759f150262df401e3524b89dc9"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.349688 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-4ctlc" event={"ID":"1b6a5e8c-2aea-47e2-802c-8604814dbf18","Type":"ContainerStarted","Data":"e86c9c147e2b6915cd1c4ecd5b523d93cd130756beb07b6a02dcc65749c29025"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.381824 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 
17:06:43.382361 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.882337814 +0000 UTC m=+142.385445020 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.386538 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" event={"ID":"03fd3f93-fb41-4e32-8276-416c40f2b9a7","Type":"ContainerStarted","Data":"6f743b9240b7cd5a1999f9913cfc814045eed2770b705f12f3dc561423e322c8"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.403896 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.410867 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" event={"ID":"3d179b2f-6775-4d3a-be36-3960799428db","Type":"ContainerStarted","Data":"6645a08c9b8e4760d504a10a53cbc5bdd20f61dd0a61f233da6977c35ecba472"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.430588 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" podStartSLOduration=121.430564376 podStartE2EDuration="2m1.430564376s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:43.426356642 +0000 UTC m=+141.929463658" watchObservedRunningTime="2025-12-05 17:06:43.430564376 +0000 UTC m=+141.933671382" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.436330 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-64nzp" event={"ID":"1e11d89c-e71a-4a17-b1dd-da3883753fde","Type":"ContainerStarted","Data":"872491a56481c9f671c1305b9c0f7cb8db08de38043e3aa21926e62a7e918ccb"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.437289 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.449546 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nhbkn" podStartSLOduration=121.449511975 podStartE2EDuration="2m1.449511975s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:43.447670581 +0000 UTC m=+141.950777587" watchObservedRunningTime="2025-12-05 17:06:43.449511975 +0000 UTC m=+141.952618991" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.461279 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-console/console-f9d7485db-9c622" event={"ID":"48038042-7b0f-48d9-9f90-6c0b9dd179d6","Type":"ContainerStarted","Data":"0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.475381 4753 patch_prober.go:28] interesting pod/console-operator-58897d9998-64nzp container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.475424 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-64nzp" podUID="1e11d89c-e71a-4a17-b1dd-da3883753fde" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.475497 4753 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-b7687 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.475595 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" podUID="915c88c7-cac2-48b3-ab7a-6e23e7240465" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.486826 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.488833 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-64nzp" podStartSLOduration=121.488813294 podStartE2EDuration="2m1.488813294s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:43.488530926 +0000 UTC m=+141.991637932" watchObservedRunningTime="2025-12-05 17:06:43.488813294 +0000 UTC m=+141.991920300" Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 17:06:43.489389 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.98934125 +0000 UTC m=+142.492448256 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.495213 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 17:06:43.496561 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:43.996547122 +0000 UTC m=+142.499654128 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.515909 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2nb5t" event={"ID":"f119bafa-ed3a-42d2-876f-c63999b216e1","Type":"ContainerStarted","Data":"f69e0f9768985125f12efc4731b586eb924b8f2bfa46ca446ec54bca86dc2a37"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.517251 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-2nb5t" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.522975 4753 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nb5t container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.523034 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2nb5t" podUID="f119bafa-ed3a-42d2-876f-c63999b216e1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.528934 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" event={"ID":"6b4d13e0-6218-444d-822b-656abbf0b80c","Type":"ContainerStarted","Data":"025cd491704217c724a80834af503a156b71bae2bbb0a55acb6859d556dc71c8"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.535637 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-9tzjp" 
event={"ID":"de65df6c-3a9a-4041-a67c-e6cbd766b4b2","Type":"ContainerStarted","Data":"b7df11e72537b86ee7ea9476cba633ad007e6f2b7d5d9ef1b3a32bd642577439"} Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.597074 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 17:06:43.598682 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:44.098662123 +0000 UTC m=+142.601769139 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.621459 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-9c622" podStartSLOduration=121.621434865 podStartE2EDuration="2m1.621434865s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:43.618380675 +0000 UTC m=+142.121487681" watchObservedRunningTime="2025-12-05 17:06:43.621434865 +0000 UTC m=+142.124541871" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.673331 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l"] Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.678702 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-2nb5t" podStartSLOduration=121.678680023 podStartE2EDuration="2m1.678680023s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:43.662592988 +0000 UTC m=+142.165700014" watchObservedRunningTime="2025-12-05 17:06:43.678680023 +0000 UTC m=+142.181787029" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.698812 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 17:06:43.699254 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 17:06:44.199242049 +0000 UTC m=+142.702349055 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.800824 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 17:06:43.801132 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:44.301114923 +0000 UTC m=+142.804221929 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.834922 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.886019 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf"] Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.901900 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:43 crc kubenswrapper[4753]: E1205 17:06:43.902272 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:44.402260615 +0000 UTC m=+142.905367621 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.929753 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-tqddg"] Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.946507 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7"] Dec 05 17:06:43 crc kubenswrapper[4753]: I1205 17:06:43.950167 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5"] Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.005566 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.005789 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:44.505738997 +0000 UTC m=+143.008846003 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.005988 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.006758 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:44.506731266 +0000 UTC m=+143.009838272 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.013981 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6"] Dec 05 17:06:44 crc kubenswrapper[4753]: W1205 17:06:44.075901 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc81db375_2956_49d8_856b_90395544e758.slice/crio-6739f8fc4aef2f9fe230f7204735001ea647eae7c30069de7226e3e03aa5dc1d WatchSource:0}: Error finding container 6739f8fc4aef2f9fe230f7204735001ea647eae7c30069de7226e3e03aa5dc1d: Status 404 returned error can't find the container with id 6739f8fc4aef2f9fe230f7204735001ea647eae7c30069de7226e3e03aa5dc1d Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.107108 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.108084 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:44.608051774 +0000 UTC m=+143.111158780 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.210277 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.211025 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:44.71101028 +0000 UTC m=+143.214117286 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.312478 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.312973 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:44.812946976 +0000 UTC m=+143.316053982 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.415941 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.416283 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:44.916272172 +0000 UTC m=+143.419379178 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.517408 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.518335 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.018321282 +0000 UTC m=+143.521428288 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.621865 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.624170 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.124109871 +0000 UTC m=+143.627216877 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.647122 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj" event={"ID":"0f4f139a-e961-4510-9a82-a2de30587b6f","Type":"ContainerStarted","Data":"d89ae1ae0146b0d3ebce7b1056c9db9278b2c6760757f4a71c48a4dd35e02b35"} Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.723956 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.724289 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.224267344 +0000 UTC m=+143.727374421 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.729546 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-4ctlc" event={"ID":"1b6a5e8c-2aea-47e2-802c-8604814dbf18","Type":"ContainerStarted","Data":"36b695a819033af53d69ae48e5d9137eb1522d1aa36141bfb1d947b990880b6a"} Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.746854 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k49xj" podStartSLOduration=122.74683491 podStartE2EDuration="2m2.74683491s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:44.723917934 +0000 UTC m=+143.227024940" watchObservedRunningTime="2025-12-05 17:06:44.74683491 +0000 UTC m=+143.249941916" Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.756472 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" event={"ID":"bb0b91f2-5d2a-4bd3-92da-6573800548ff","Type":"ContainerStarted","Data":"638cb2b73095051fe935ee8095dfb8ecabd3879fe2bc46cc8cc3d719ebdc7a70"} Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.777972 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" event={"ID":"6b4d13e0-6218-444d-822b-656abbf0b80c","Type":"ContainerStarted","Data":"5d47728450e5a4db0b3fc8ec4df70bd3b05e5fec01d3f42cfe2231d9a2125aaf"} Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.825579 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.827313 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.327290082 +0000 UTC m=+143.830397308 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.918720 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" event={"ID":"915c88c7-cac2-48b3-ab7a-6e23e7240465","Type":"ContainerStarted","Data":"aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544"} Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.928789 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:44 crc kubenswrapper[4753]: E1205 17:06:44.930265 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.430250828 +0000 UTC m=+143.933357834 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.933973 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.954937 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" event={"ID":"c81db375-2956-49d8-856b-90395544e758","Type":"ContainerStarted","Data":"6739f8fc4aef2f9fe230f7204735001ea647eae7c30069de7226e3e03aa5dc1d"} Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.985695 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" podStartSLOduration=121.985673713 podStartE2EDuration="2m1.985673713s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:44.984082026 +0000 UTC m=+143.487189032" watchObservedRunningTime="2025-12-05 17:06:44.985673713 +0000 UTC m=+143.488780719" Dec 05 17:06:44 crc kubenswrapper[4753]: I1205 17:06:44.987045 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-4ctlc" podStartSLOduration=6.987039423 podStartE2EDuration="6.987039423s" podCreationTimestamp="2025-12-05 17:06:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:44.918695928 +0000 UTC m=+143.421802934" watchObservedRunningTime="2025-12-05 17:06:44.987039423 +0000 UTC m=+143.490146429" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.027079 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" event={"ID":"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a","Type":"ContainerStarted","Data":"1da874898cb30deadaf6d8d8834f7e2bc0043ee562d348d6bae3addb6240e023"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.036481 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.036871 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.536858532 +0000 UTC m=+144.039965538 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.053473 4753 generic.go:334] "Generic (PLEG): container finished" podID="40a88e42-a0d7-4703-a3f8-25f524f90eca" containerID="a0440140881b0e722828ff40f539940e7ee5d6791fcd435337668068d3537a52" exitCode=0 Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.053957 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" event={"ID":"40a88e42-a0d7-4703-a3f8-25f524f90eca","Type":"ContainerDied","Data":"a0440140881b0e722828ff40f539940e7ee5d6791fcd435337668068d3537a52"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.137221 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.137579 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.637562861 +0000 UTC m=+144.140669857 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.146444 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" event={"ID":"73058959-ba27-4d94-8662-a213a53113e9","Type":"ContainerStarted","Data":"7d259772b1d90a4934586f510afb1f8cb74082b7dc4d58707fb45d1d5d9f27fd"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.168505 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" event={"ID":"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b","Type":"ContainerStarted","Data":"e66fc56c16d45e47f09694570e6b297edc1b03d1e7f3588076c83e713a07fc91"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.190352 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-44d5b" podStartSLOduration=123.190315516 podStartE2EDuration="2m3.190315516s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.181314671 +0000 UTC m=+143.684421677" watchObservedRunningTime="2025-12-05 17:06:45.190315516 +0000 UTC m=+143.693422522" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.199424 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" event={"ID":"c9582e4b-c48a-470f-a0f7-9cbad68d972f","Type":"ContainerStarted","Data":"a34c104b3de3df7d923687641eb0ce9287e693ca6a1a276aaf6e00acfd6ab31a"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.233863 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" event={"ID":"660abb91-a2d0-4cf3-b0d0-073b1cccdf37","Type":"ContainerStarted","Data":"7e6a6e41e2e1d7c5880a1afd9a76d1001f6df0c23555fd6996e6dd9cd021b879"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.234394 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.235418 4753 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-flncn container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.235487 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" podUID="660abb91-a2d0-4cf3-b0d0-073b1cccdf37" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.240751 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.243541 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.743520955 +0000 UTC m=+144.246628161 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.282913 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-lcd59" podStartSLOduration=123.282896056 podStartE2EDuration="2m3.282896056s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.23285022 +0000 UTC m=+143.735957226" watchObservedRunningTime="2025-12-05 17:06:45.282896056 +0000 UTC m=+143.786003062" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.292571 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" event={"ID":"c3117f4b-6f3a-4131-9001-d39222e6f268","Type":"ContainerStarted","Data":"27d65245dab10d3567db7e681dd5e984b902ce2f2fdbb9a740d47b5267a2294c"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.297013 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.300617 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" podStartSLOduration=122.300590148 podStartE2EDuration="2m2.300590148s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.284513124 +0000 UTC m=+143.787620130" watchObservedRunningTime="2025-12-05 17:06:45.300590148 +0000 UTC m=+143.803697154" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.338412 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" event={"ID":"72117afc-2e5f-4696-b515-76aced63c30f","Type":"ContainerStarted","Data":"fc3ab6f59dce4190c6f263214d97b7aa6c49c1e59a6d9511dec25e0cd297a044"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.343962 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.345264 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.845247595 +0000 UTC m=+144.348354601 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.419127 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" event={"ID":"92cd61ad-2356-4854-ad74-bde03a83abf0","Type":"ContainerStarted","Data":"82aceacc560b01307d87ebb8f3ae5c74d4f1f69ea480076d0d4019b095f29d99"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.447899 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.450546 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:45.950526169 +0000 UTC m=+144.453633175 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.453288 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" event={"ID":"667c9562-fe34-4ad9-9150-43613978c0d8","Type":"ContainerStarted","Data":"4e3590cd94f61fb2de3d11758baea7d3a090e76844fd31f3fb6f8cff7f965b1d"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.474816 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-9tzjp" event={"ID":"de65df6c-3a9a-4041-a67c-e6cbd766b4b2","Type":"ContainerStarted","Data":"fc8ad44d57dca472e9c0a1283778cbb9999b7e4a5b64362d9f8227db9c45f419"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.493005 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-q2bmk" podStartSLOduration=123.492982141 podStartE2EDuration="2m3.492982141s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.471398604 +0000 UTC m=+143.974505610" watchObservedRunningTime="2025-12-05 17:06:45.492982141 +0000 UTC m=+143.996089147" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.493309 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" podStartSLOduration=123.49330517 podStartE2EDuration="2m3.49330517s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.415002712 +0000 UTC m=+143.918109718" watchObservedRunningTime="2025-12-05 17:06:45.49330517 +0000 UTC m=+143.996412176" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.538497 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" event={"ID":"c18256b2-4287-4df0-8819-201cf14c6380","Type":"ContainerStarted","Data":"d0aa2cfd4ee444d4e2c1e30dd674db80a2bfaeda0a0e49f9b1d208125591adcf"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.549665 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.551593 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.051561678 +0000 UTC m=+144.554668684 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.575782 4753 generic.go:334] "Generic (PLEG): container finished" podID="782feb3b-5fe4-413e-87a0-9602f412897e" containerID="b69b3fc64eb970955da982196a3919b80d7e9a7c9e839be57fe9f953e215392f" exitCode=0 Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.576652 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" event={"ID":"782feb3b-5fe4-413e-87a0-9602f412897e","Type":"ContainerDied","Data":"b69b3fc64eb970955da982196a3919b80d7e9a7c9e839be57fe9f953e215392f"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.591006 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" podStartSLOduration=122.590986241 podStartE2EDuration="2m2.590986241s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.557642528 +0000 UTC m=+144.060749534" watchObservedRunningTime="2025-12-05 17:06:45.590986241 +0000 UTC m=+144.094093247" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.600714 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx" event={"ID":"002aa1a9-0253-4b17-8c8c-d23c830c46cc","Type":"ContainerStarted","Data":"5ed8ca2a07a8ea250c2a1b6a56431b1107ea997cd3605e565c16daa132d579eb"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.682992 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.683539 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" podStartSLOduration=123.683509049 podStartE2EDuration="2m3.683509049s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.593271258 +0000 UTC m=+144.096378264" watchObservedRunningTime="2025-12-05 17:06:45.683509049 +0000 UTC m=+144.186616055" Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.685996 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.185982142 +0000 UTC m=+144.689089148 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.711635 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" event={"ID":"cbcd15e6-1457-4722-b319-13651ca7598b","Type":"ContainerStarted","Data":"96d24e99a137d44786051deca3c2c28b90e181ce17a6afa3ab694e8227575d7a"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.735972 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" podStartSLOduration=123.735945135 podStartE2EDuration="2m3.735945135s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.729882207 +0000 UTC m=+144.232989233" watchObservedRunningTime="2025-12-05 17:06:45.735945135 +0000 UTC m=+144.239052141" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.786124 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.786568 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.286548408 +0000 UTC m=+144.789655414 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.796258 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" event={"ID":"783086d1-b0d9-4f35-b3d1-95cd8df517aa","Type":"ContainerStarted","Data":"cd232ae3fafcc665783071e61f4d860c4154d65a0b78e0f433201ef68b829b90"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.797230 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.814468 4753 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-9j8t7 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused" start-of-body= Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.814558 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" podUID="783086d1-b0d9-4f35-b3d1-95cd8df517aa" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.815354 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-9tzjp" podStartSLOduration=123.815336936 podStartE2EDuration="2m3.815336936s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.814901643 +0000 UTC m=+144.318008649" watchObservedRunningTime="2025-12-05 17:06:45.815336936 +0000 UTC m=+144.318443942" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.820802 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" event={"ID":"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490","Type":"ContainerStarted","Data":"de81b964da3e299ca9155de7c4c4f2503e481a50200908e8a51196e1d703c34e"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.842671 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xhb8z" event={"ID":"cf24b057-f212-46d9-a3f8-0ef08669940d","Type":"ContainerStarted","Data":"c56eb067047ec4be2645468a74ad69959547971811288b23e9fbde360027e274"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.845361 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.882696 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" podStartSLOduration=122.882671702 podStartE2EDuration="2m2.882671702s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 
+0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.876751627 +0000 UTC m=+144.379858623" watchObservedRunningTime="2025-12-05 17:06:45.882671702 +0000 UTC m=+144.385778708" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.891499 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.893529 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6" event={"ID":"86d71fb0-ec2b-4606-bb01-0e88f42b572b","Type":"ContainerStarted","Data":"cbe7bd933b013ebcda261fd492c22db45a5c3a48b45b38b97e480ec867fa6c13"} Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.894431 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.394418218 +0000 UTC m=+144.897525224 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.907471 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jwgnx" podStartSLOduration=123.907455283 podStartE2EDuration="2m3.907455283s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.90601035 +0000 UTC m=+144.409117356" watchObservedRunningTime="2025-12-05 17:06:45.907455283 +0000 UTC m=+144.410562289" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.909815 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" event={"ID":"5881adf5-7728-46d0-b449-b0d8b0d77c7d","Type":"ContainerStarted","Data":"c4cad348598c5002dc3df90901b18f456bf117e8a69cfae678ad9fce49b2e4bd"} Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.910017 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.911422 4753 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nb5t container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.911843 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2nb5t" podUID="f119bafa-ed3a-42d2-876f-c63999b216e1" 
containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.927391 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-64nzp" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.934527 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.990261 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-hqm52" podStartSLOduration=122.990235804 podStartE2EDuration="2m2.990235804s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.987957347 +0000 UTC m=+144.491064353" watchObservedRunningTime="2025-12-05 17:06:45.990235804 +0000 UTC m=+144.493342810" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.990808 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-69bc9" podStartSLOduration=122.99080167 podStartE2EDuration="2m2.99080167s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:45.951053008 +0000 UTC m=+144.454160004" watchObservedRunningTime="2025-12-05 17:06:45.99080167 +0000 UTC m=+144.493908676" Dec 05 17:06:45 crc kubenswrapper[4753]: I1205 17:06:45.995951 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:45 crc kubenswrapper[4753]: E1205 17:06:45.997247 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.49723195 +0000 UTC m=+145.000338956 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.086743 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.099218 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.100015 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.60000306 +0000 UTC m=+145.103110066 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.126275 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-xhb8z" podStartSLOduration=8.126259695 podStartE2EDuration="8.126259695s" podCreationTimestamp="2025-12-05 17:06:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:46.082074682 +0000 UTC m=+144.585181688" watchObservedRunningTime="2025-12-05 17:06:46.126259695 +0000 UTC m=+144.629366701" Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.132134 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:46 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:46 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:46 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.132465 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.201570 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.201758 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.70173227 +0000 UTC m=+145.204839276 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.203421 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.203851 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.703837862 +0000 UTC m=+145.206944868 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.300218 4753 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-kzdfh container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.20:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.300623 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" podUID="c3117f4b-6f3a-4131-9001-d39222e6f268" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.20:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.305006 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.305141 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.805117009 +0000 UTC m=+145.308224015 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.305441 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.305909 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.805890251 +0000 UTC m=+145.308997247 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.406468 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.406698 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:46.906673133 +0000 UTC m=+145.409780139 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.508206 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.508538 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.008520337 +0000 UTC m=+145.511627343 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.609046 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.609241 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.109215056 +0000 UTC m=+145.612322062 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.610219 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.610578 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.110561225 +0000 UTC m=+145.613668231 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.711957 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.712176 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.21212931 +0000 UTC m=+145.715236306 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.712560 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.712904 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.212896133 +0000 UTC m=+145.716003139 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.813583 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.813968 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.313945453 +0000 UTC m=+145.817052469 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.915099 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:46 crc kubenswrapper[4753]: E1205 17:06:46.915523 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.415504697 +0000 UTC m=+145.918611703 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.922632 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" event={"ID":"4bad5655-05a5-4081-868b-3d2e69df620f","Type":"ContainerStarted","Data":"e1c530fda9346e0e0f44bba9117317dfe74f0921ab86cb0c9c66e6b312fac41b"} Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.924872 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" event={"ID":"8dfb6055-bb8a-4b03-88cb-0ec84a2b1490","Type":"ContainerStarted","Data":"e8b53a0138d03f553253a97bef74c50b98750834c94ad4219c7eac47c93b4adb"} Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.931259 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-djddj" event={"ID":"24e4585c-4850-4101-a40d-b38424860805","Type":"ContainerStarted","Data":"488dd08ad399c2cd53a9e729abdc66c156d29124f00d3f0e92e43beaee608295"} Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.933191 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" event={"ID":"6b4d13e0-6218-444d-822b-656abbf0b80c","Type":"ContainerStarted","Data":"689efefc50f5c84b2905540d921197b8ec1490d5f29523f33a6ac066b03b9abc"} Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.940318 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" event={"ID":"03fd3f93-fb41-4e32-8276-416c40f2b9a7","Type":"ContainerStarted","Data":"5be7c5960ffb2fc15b84d5390bf5773ad1ba4843c0fb43e769be20a63f92d632"} Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.940612 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.959331 4753 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-s87ds container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body= Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.959411 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.959505 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-vx9s2" event={"ID":"5cd4f96b-673e-4518-a8ee-da3ccb7a86b0","Type":"ContainerStarted","Data":"b9973b4374d66974d83247e7374465a332cb2180b437548bd389d5eb958cde3d"} Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.982471 4753 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" event={"ID":"c8b527e5-cd18-4b81-aa80-0c7004486286","Type":"ContainerStarted","Data":"388a982cd29b30a92091456256eefa598166895dae796be63e9b8ec11821c271"} Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.982535 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" event={"ID":"c8b527e5-cd18-4b81-aa80-0c7004486286","Type":"ContainerStarted","Data":"5bf323d022b37e865070e60e5b207b46572990e7abbd9d98dd313af920ec6b6d"} Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.983299 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.994993 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w58zz" podStartSLOduration=123.994977121 podStartE2EDuration="2m3.994977121s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:46.248477669 +0000 UTC m=+144.751584675" watchObservedRunningTime="2025-12-05 17:06:46.994977121 +0000 UTC m=+145.498084127" Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.995948 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-wk9v7" podStartSLOduration=124.995940089 podStartE2EDuration="2m4.995940089s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:46.993690963 +0000 UTC m=+145.496797969" watchObservedRunningTime="2025-12-05 17:06:46.995940089 +0000 UTC m=+145.499047095" Dec 05 17:06:46 crc kubenswrapper[4753]: I1205 17:06:46.998671 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" event={"ID":"bb0b91f2-5d2a-4bd3-92da-6573800548ff","Type":"ContainerStarted","Data":"f447a342f062ed487bee45995d8c3135fdfeb17b9a3aec7e5735db1f75be1182"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.016697 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.018566 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.518547656 +0000 UTC m=+146.021654692 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.016764 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" event={"ID":"bcea690d-529b-4175-b5fd-a1a07970cf0d","Type":"ContainerStarted","Data":"0e0ecd3a1e4d97b54f11a1b151bcb6f28cdd5c51c288db289d64dcaede3a1638"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.018616 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.039911 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" podStartSLOduration=124.039892575 podStartE2EDuration="2m4.039892575s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.032456436 +0000 UTC m=+145.535563442" watchObservedRunningTime="2025-12-05 17:06:47.039892575 +0000 UTC m=+145.542999581" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.048478 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" event={"ID":"783086d1-b0d9-4f35-b3d1-95cd8df517aa","Type":"ContainerStarted","Data":"ac221131c3441f1ec609510ac60d1d0cae31481972fcf4229747609491a31af1"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.049919 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.058024 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b" event={"ID":"27067136-5ba5-407b-a4a5-4d1e8c284564","Type":"ContainerStarted","Data":"69f4a5ff8f0e3dae9414f3791346b810e242b2b2af8379756a2def12fc6f7213"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.076891 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" podStartSLOduration=124.076874596 podStartE2EDuration="2m4.076874596s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.075107824 +0000 UTC m=+145.578214830" watchObservedRunningTime="2025-12-05 17:06:47.076874596 +0000 UTC m=+145.579981602" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.079604 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" event={"ID":"40a88e42-a0d7-4703-a3f8-25f524f90eca","Type":"ContainerStarted","Data":"a3b8b460fa225a4cea8c6e14f164ae5d582d32fbc86c7622317face6f32539ec"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.097972 4753 patch_prober.go:28] interesting 
pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:47 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:47 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:47 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.098430 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.116097 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" event={"ID":"3f2ff5f2-b8e0-4d37-80df-3ff278b01d0a","Type":"ContainerStarted","Data":"5be35a5929eebdad01e19254859737d9de4d3638581b4b2797c8252713732055"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.118777 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.119391 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.619357039 +0000 UTC m=+146.122464045 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.138431 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-hq6h7" event={"ID":"667c9562-fe34-4ad9-9150-43613978c0d8","Type":"ContainerStarted","Data":"915dff2b34f0719a32e096727b86ba7f3c9b97a509a98b49a1c88288ab5d2764"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.140879 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-m9rf5" podStartSLOduration=125.140862283 podStartE2EDuration="2m5.140862283s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.134831605 +0000 UTC m=+145.637938611" watchObservedRunningTime="2025-12-05 17:06:47.140862283 +0000 UTC m=+145.643969279" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.185781 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" event={"ID":"d4774a0f-e7ca-49bf-a25e-c2a88a7cdc3b","Type":"ContainerStarted","Data":"8cbde4d549389c5ff339585a0adb069a6c1b6a9e41cc76572ae0c677127ae6cf"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.209209 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-55drw" podStartSLOduration=124.209174617 podStartE2EDuration="2m4.209174617s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.190577479 +0000 UTC m=+145.693684485" watchObservedRunningTime="2025-12-05 17:06:47.209174617 +0000 UTC m=+145.712281613" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.239159 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.240318 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.740286934 +0000 UTC m=+146.243393940 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.241526 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" event={"ID":"660abb91-a2d0-4cf3-b0d0-073b1cccdf37","Type":"ContainerStarted","Data":"ada911e35e811dc1b2f8b7cf22db05458c2996c56ca89b78f1d34756dba5456b"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.266518 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-flncn" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.286320 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tfkgs" podStartSLOduration=125.286296311 podStartE2EDuration="2m5.286296311s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.238082389 +0000 UTC m=+145.741189395" watchObservedRunningTime="2025-12-05 17:06:47.286296311 +0000 UTC m=+145.789403307" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.309718 4753 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.310048 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" event={"ID":"c81db375-2956-49d8-856b-90395544e758","Type":"ContainerStarted","Data":"0f5d49846b29ae7ee1c27dd1f87e89277cbd899c18213c089a2d5887d98c0677"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.310098 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" event={"ID":"c81db375-2956-49d8-856b-90395544e758","Type":"ContainerStarted","Data":"e38d91756f60b518204c69fa97ad6cc3a440a585335d165f2ca2a2c9de38c0f8"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.340859 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-pcb9b" podStartSLOduration=124.340826759 podStartE2EDuration="2m4.340826759s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.288181717 +0000 UTC m=+145.791288733" watchObservedRunningTime="2025-12-05 17:06:47.340826759 +0000 UTC m=+145.843933775" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.344795 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" event={"ID":"c18256b2-4287-4df0-8819-201cf14c6380","Type":"ContainerStarted","Data":"89de624176223c811cb2f57a41e6098fc22334831d8859e206efa904e026fa14"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 
17:06:47.346569 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.347879 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.847865677 +0000 UTC m=+146.350972683 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.359453 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" event={"ID":"782feb3b-5fe4-413e-87a0-9602f412897e","Type":"ContainerStarted","Data":"fccb02dcf4b772b4860946cfb44dad8b9f13e3c85aa73c294ef7c869df0fb3ba"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.359517 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" event={"ID":"782feb3b-5fe4-413e-87a0-9602f412897e","Type":"ContainerStarted","Data":"582ead97e495cb061c2e5b41e01676a82ac20a7eb2afb3b332ffa5ff6d50889e"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.372201 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" podStartSLOduration=124.372168633 podStartE2EDuration="2m4.372168633s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.345371343 +0000 UTC m=+145.848478349" watchObservedRunningTime="2025-12-05 17:06:47.372168633 +0000 UTC m=+145.875275639" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.376289 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-hzrmq" event={"ID":"92cd61ad-2356-4854-ad74-bde03a83abf0","Type":"ContainerStarted","Data":"e0b3e52d70c8e42edb4599c0cf3f7ebc3aaa2fb34faace4545d252b9ff35b2d9"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.396441 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-xhb8z" event={"ID":"cf24b057-f212-46d9-a3f8-0ef08669940d","Type":"ContainerStarted","Data":"48afb7a6f826b14121a7ce292279cf1c24c9ecf6aca6e13dce1bf7f1ab8ab8ae"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.435075 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-4gllz" podStartSLOduration=125.435056118 podStartE2EDuration="2m5.435056118s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-05 17:06:47.384130096 +0000 UTC m=+145.887237102" watchObservedRunningTime="2025-12-05 17:06:47.435056118 +0000 UTC m=+145.938163114" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.435895 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-22ncf" podStartSLOduration=124.435889452 podStartE2EDuration="2m4.435889452s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.425167876 +0000 UTC m=+145.928274882" watchObservedRunningTime="2025-12-05 17:06:47.435889452 +0000 UTC m=+145.938996448" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.440188 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6" event={"ID":"86d71fb0-ec2b-4606-bb01-0e88f42b572b","Type":"ContainerStarted","Data":"b3cc2f920809fae552ce7255b7e91279c4d37acfcc7a6fbe1efbe40db7169d21"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.440234 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6" event={"ID":"86d71fb0-ec2b-4606-bb01-0e88f42b572b","Type":"ContainerStarted","Data":"ce2dc1fa6391118b99d855cd3bbf98a42df054cc94fc34a030f9447c7a5797bd"} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.451091 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.451616 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:47.951597195 +0000 UTC m=+146.454704201 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.464896 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.553813 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.559372 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.059357373 +0000 UTC m=+146.562464379 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.570891 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-wqn7l" podStartSLOduration=124.570874003 podStartE2EDuration="2m4.570874003s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.491737099 +0000 UTC m=+145.994844105" watchObservedRunningTime="2025-12-05 17:06:47.570874003 +0000 UTC m=+146.073981009" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.655299 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.655866 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.155851218 +0000 UTC m=+146.658958224 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.683431 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-rvmt6" podStartSLOduration=124.683407811 podStartE2EDuration="2m4.683407811s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.603077542 +0000 UTC m=+146.106184548" watchObservedRunningTime="2025-12-05 17:06:47.683407811 +0000 UTC m=+146.186514817" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.763499 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.763847 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.263833443 +0000 UTC m=+146.766940449 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.768706 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-dgpzc"] Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.769917 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:47 crc kubenswrapper[4753]: W1205 17:06:47.779199 4753 reflector.go:561] object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g": failed to list *v1.Secret: secrets "certified-operators-dockercfg-4rs5g" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-marketplace": no relationship found between node 'crc' and this object Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.779256 4753 reflector.go:158] "Unhandled Error" err="object-\"openshift-marketplace\"/\"certified-operators-dockercfg-4rs5g\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"certified-operators-dockercfg-4rs5g\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-marketplace\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.850987 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dgpzc"] Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.851888 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-tqddg" podStartSLOduration=124.851878989 podStartE2EDuration="2m4.851878989s" podCreationTimestamp="2025-12-05 17:04:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.818576577 +0000 UTC m=+146.321683583" watchObservedRunningTime="2025-12-05 17:06:47.851878989 +0000 UTC m=+146.354985995" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.883504 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.883927 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.383873342 +0000 UTC m=+146.886980348 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.883988 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-catalog-content\") pod \"certified-operators-dgpzc\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.884089 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:47 crc kubenswrapper[4753]: E1205 17:06:47.885437 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:06:48.385418258 +0000 UTC m=+146.888525264 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4vvd9" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.904410 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nw7t\" (UniqueName: \"kubernetes.io/projected/5560048a-c9e9-4743-9573-1b58a2240c29-kube-api-access-5nw7t\") pod \"certified-operators-dgpzc\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.904635 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-utilities\") pod \"certified-operators-dgpzc\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.933347 4753 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T17:06:47.309759293Z","Handler":null,"Name":""} Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.946107 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cb4ng"] Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.947453 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.965576 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.969078 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" podStartSLOduration=125.969058424 podStartE2EDuration="2m5.969058424s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:47.967648913 +0000 UTC m=+146.470755939" watchObservedRunningTime="2025-12-05 17:06:47.969058424 +0000 UTC m=+146.472165430" Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.995528 4753 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 05 17:06:47 crc kubenswrapper[4753]: I1205 17:06:47.995573 4753 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.006609 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.006930 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nw7t\" (UniqueName: \"kubernetes.io/projected/5560048a-c9e9-4743-9573-1b58a2240c29-kube-api-access-5nw7t\") pod \"certified-operators-dgpzc\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.006991 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-utilities\") pod \"community-operators-cb4ng\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.007010 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-catalog-content\") pod \"community-operators-cb4ng\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.007033 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnxg8\" (UniqueName: \"kubernetes.io/projected/db1dcb29-b751-4a77-8f2e-68efadf955b9-kube-api-access-xnxg8\") pod \"community-operators-cb4ng\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.007058 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-utilities\") pod \"certified-operators-dgpzc\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.007080 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-catalog-content\") pod \"certified-operators-dgpzc\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.007255 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cb4ng"] Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.007544 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-catalog-content\") pod \"certified-operators-dgpzc\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.007828 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-utilities\") pod \"certified-operators-dgpzc\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.052635 4753 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-9j8t7 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.052723 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" podUID="783086d1-b0d9-4f35-b3d1-95cd8df517aa" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.066106 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nw7t\" (UniqueName: \"kubernetes.io/projected/5560048a-c9e9-4743-9573-1b58a2240c29-kube-api-access-5nw7t\") pod \"certified-operators-dgpzc\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.095991 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:48 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:48 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:48 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.096046 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" 
podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.098386 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t2qjv"] Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.099704 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.110503 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 17:06:48 crc kubenswrapper[4753]: E1205 17:06:48.111209 4753 reconciler_common.go:156] "operationExecutor.UnmountVolume failed (controllerAttachDetachEnabled true) for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") : UnmountVolume.NewUnmounter failed for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") : kubernetes.io/csi: unmounter failed to load volume data file [/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~csi/pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8/mount]: kubernetes.io/csi: failed to open volume data file [/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~csi/pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8/vol_data.json]: open /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~csi/pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8/vol_data.json: no such file or directory" err="UnmountVolume.NewUnmounter failed for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") : kubernetes.io/csi: unmounter failed to load volume data file [/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~csi/pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8/mount]: kubernetes.io/csi: failed to open volume data file [/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~csi/pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8/vol_data.json]: open /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~csi/pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8/vol_data.json: no such file or directory" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.111510 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-utilities\") pod \"community-operators-cb4ng\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " pod="openshift-marketplace/community-operators-cb4ng" Dec 05 
17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.111537 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-catalog-content\") pod \"community-operators-cb4ng\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.111570 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnxg8\" (UniqueName: \"kubernetes.io/projected/db1dcb29-b751-4a77-8f2e-68efadf955b9-kube-api-access-xnxg8\") pod \"community-operators-cb4ng\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.111602 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.112089 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-catalog-content\") pod \"community-operators-cb4ng\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.116126 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
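The sequence above is a startup race rather than a persistent fault: every MountVolume.MountDevice and UnmountVolume.TearDown attempt for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 fails with "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers" only until the csi-hostpathplugin-djddj pod comes up, its registration socket is noticed (plugin_watcher at 17:06:47.309), and the plugin is validated and registered (csi_plugin.go at 17:06:47.995). In the meantime the volume reconciler retries on a fixed 500ms backoff (the repeated "durationBeforeRetry 500ms" entries), and the first attempt after registration succeeds at 17:06:48.116, with MountDevice skipped because STAGE_UNSTAGE_VOLUME is not set. A minimal, self-contained Go sketch of that retry-until-registered pattern follows; the driverRegistry type, function names, and timings are illustrative assumptions, not kubelet source.

// Sketch of the race visible in the log above: mount attempts against an
// in-memory driver registry fail on a fixed backoff until a concurrent
// registration lands. All names here are illustrative, not kubelet code.
package main

import (
	"fmt"
	"sync"
	"time"
)

// driverRegistry stands in for the kubelet's set of registered CSI drivers,
// populated when a plugin's registration socket is discovered and validated.
type driverRegistry struct {
	mu      sync.RWMutex
	drivers map[string]bool
}

func (r *driverRegistry) register(name string) {
	r.mu.Lock()
	defer r.mu.Unlock()
	r.drivers[name] = true
}

func (r *driverRegistry) mountDevice(name string) error {
	r.mu.RLock()
	defer r.mu.RUnlock()
	if !r.drivers[name] {
		return fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
	}
	return nil
}

func main() {
	reg := &driverRegistry{drivers: map[string]bool{}}

	// The driver pod comes up a little later and registers itself, as
	// csi-hostpathplugin-djddj does at 17:06:47.995 in the log.
	go func() {
		time.Sleep(1200 * time.Millisecond)
		reg.register("kubevirt.io.hostpath-provisioner")
	}()

	// The reconciler retries on a fixed backoff; each failure above logs
	// "No retries permitted until <now+500ms> (durationBeforeRetry 500ms)".
	const durationBeforeRetry = 500 * time.Millisecond
	for attempt := 1; ; attempt++ {
		if err := reg.mountDevice("kubevirt.io.hostpath-provisioner"); err != nil {
			fmt.Printf("attempt %d: MountVolume.MountDevice failed: %v\n", attempt, err)
			time.Sleep(durationBeforeRetry)
			continue
		}
		fmt.Printf("attempt %d: MountVolume.MountDevice succeeded\n", attempt)
		return
	}
}

Run as-is, the sketch prints two or three failed attempts and then a success, mirroring the log's cadence; the point of the fixed 500ms backoff is that a late-registering driver costs at most one extra retry interval once its socket appears.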
Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.116264 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.119833 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-utilities\") pod \"community-operators-cb4ng\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.131300 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t2qjv"] Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.204917 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnxg8\" (UniqueName: \"kubernetes.io/projected/db1dcb29-b751-4a77-8f2e-68efadf955b9-kube-api-access-xnxg8\") pod \"community-operators-cb4ng\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.217911 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rb4kw\" (UniqueName: \"kubernetes.io/projected/07c18cf7-d24d-46d2-90c1-0ef186b1c434-kube-api-access-rb4kw\") pod \"certified-operators-t2qjv\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.218339 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-utilities\") pod \"certified-operators-t2qjv\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.218370 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-catalog-content\") pod \"certified-operators-t2qjv\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.288222 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qt9vk"] Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.289549 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.313627 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.321739 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-catalog-content\") pod \"certified-operators-t2qjv\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.321802 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-utilities\") pod \"community-operators-qt9vk\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.321887 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5zgt\" (UniqueName: \"kubernetes.io/projected/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-kube-api-access-g5zgt\") pod \"community-operators-qt9vk\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.321941 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rb4kw\" (UniqueName: \"kubernetes.io/projected/07c18cf7-d24d-46d2-90c1-0ef186b1c434-kube-api-access-rb4kw\") pod \"certified-operators-t2qjv\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.321965 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-catalog-content\") pod \"community-operators-qt9vk\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.321984 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-utilities\") pod \"certified-operators-t2qjv\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.322576 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-utilities\") pod \"certified-operators-t2qjv\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.322615 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-catalog-content\") pod \"certified-operators-t2qjv\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.324662 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qt9vk"] Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.369910 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-rb4kw\" (UniqueName: \"kubernetes.io/projected/07c18cf7-d24d-46d2-90c1-0ef186b1c434-kube-api-access-rb4kw\") pod \"certified-operators-t2qjv\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.424058 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-utilities\") pod \"community-operators-qt9vk\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.424252 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5zgt\" (UniqueName: \"kubernetes.io/projected/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-kube-api-access-g5zgt\") pod \"community-operators-qt9vk\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.424312 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-catalog-content\") pod \"community-operators-qt9vk\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.425462 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-catalog-content\") pod \"community-operators-qt9vk\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.425755 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-utilities\") pod \"community-operators-qt9vk\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.433499 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4vvd9\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.474021 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5zgt\" (UniqueName: \"kubernetes.io/projected/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-kube-api-access-g5zgt\") pod \"community-operators-qt9vk\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.492350 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-djddj" event={"ID":"24e4585c-4850-4101-a40d-b38424860805","Type":"ContainerStarted","Data":"cb23e26dd7bd6458e69ec338d0547de31eb887a6543d538229216da22871ed3e"} Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.492775 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="hostpath-provisioner/csi-hostpathplugin-djddj" event={"ID":"24e4585c-4850-4101-a40d-b38424860805","Type":"ContainerStarted","Data":"73d6e1b6d2eec76042515874a7e1ede7ac1a7fe97f5077a65dbbb37ca9ba535d"} Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.499462 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.513784 4753 generic.go:334] "Generic (PLEG): container finished" podID="c18256b2-4287-4df0-8819-201cf14c6380" containerID="89de624176223c811cb2f57a41e6098fc22334831d8859e206efa904e026fa14" exitCode=0 Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.514639 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" event={"ID":"c18256b2-4287-4df0-8819-201cf14c6380","Type":"ContainerDied","Data":"89de624176223c811cb2f57a41e6098fc22334831d8859e206efa904e026fa14"} Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.518353 4753 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-s87ds container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" start-of-body= Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.518411 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.40:8080/healthz\": dial tcp 10.217.0.40:8080: connect: connection refused" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.550310 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-9j8t7" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.639934 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.804113 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.806199 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.809392 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.838221 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.838288 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.838312 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.838338 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.866813 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.866841 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.873921 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:48 crc kubenswrapper[4753]: I1205 17:06:48.953422 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.038682 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.121859 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.125845 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:49 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:49 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:49 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.126480 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.235413 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.306235 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.326429 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cb4ng"] Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.349319 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18256b2-4287-4df0-8819-201cf14c6380-secret-volume\") pod \"c18256b2-4287-4df0-8819-201cf14c6380\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.349435 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18256b2-4287-4df0-8819-201cf14c6380-config-volume\") pod \"c18256b2-4287-4df0-8819-201cf14c6380\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.349527 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jwnb8\" (UniqueName: \"kubernetes.io/projected/c18256b2-4287-4df0-8819-201cf14c6380-kube-api-access-jwnb8\") pod \"c18256b2-4287-4df0-8819-201cf14c6380\" (UID: \"c18256b2-4287-4df0-8819-201cf14c6380\") " Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.353722 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c18256b2-4287-4df0-8819-201cf14c6380-config-volume" (OuterVolumeSpecName: "config-volume") pod "c18256b2-4287-4df0-8819-201cf14c6380" (UID: "c18256b2-4287-4df0-8819-201cf14c6380"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.382791 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c18256b2-4287-4df0-8819-201cf14c6380-kube-api-access-jwnb8" (OuterVolumeSpecName: "kube-api-access-jwnb8") pod "c18256b2-4287-4df0-8819-201cf14c6380" (UID: "c18256b2-4287-4df0-8819-201cf14c6380"). InnerVolumeSpecName "kube-api-access-jwnb8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.387409 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c18256b2-4287-4df0-8819-201cf14c6380-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c18256b2-4287-4df0-8819-201cf14c6380" (UID: "c18256b2-4287-4df0-8819-201cf14c6380"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.403955 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4vvd9"] Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.454137 4753 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c18256b2-4287-4df0-8819-201cf14c6380-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.454188 4753 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c18256b2-4287-4df0-8819-201cf14c6380-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.454202 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jwnb8\" (UniqueName: \"kubernetes.io/projected/c18256b2-4287-4df0-8819-201cf14c6380-kube-api-access-jwnb8\") on node \"crc\" DevicePath \"\"" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.482251 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qt9vk"] Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.488785 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-dgpzc"] Dec 05 17:06:49 crc kubenswrapper[4753]: W1205 17:06:49.535027 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9a495df2_eeed_4f2a_b492_cf5aaaaab8f9.slice/crio-0ad1763cb3f495581aaf2e6969b87aebbb92d87bf8d65c4a989126bd4c81479c WatchSource:0}: Error finding container 0ad1763cb3f495581aaf2e6969b87aebbb92d87bf8d65c4a989126bd4c81479c: Status 404 returned error can't find the container with id 0ad1763cb3f495581aaf2e6969b87aebbb92d87bf8d65c4a989126bd4c81479c Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.571900 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" event={"ID":"c18256b2-4287-4df0-8819-201cf14c6380","Type":"ContainerDied","Data":"d0aa2cfd4ee444d4e2c1e30dd674db80a2bfaeda0a0e49f9b1d208125591adcf"} Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.571967 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0aa2cfd4ee444d4e2c1e30dd674db80a2bfaeda0a0e49f9b1d208125591adcf" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.572108 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.581244 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t2qjv"] Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.610337 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cb4ng" event={"ID":"db1dcb29-b751-4a77-8f2e-68efadf955b9","Type":"ContainerStarted","Data":"7f1e6bfaf13c4959842c0f22c4aa4f4a2edb8f397a72edb5b610fe6797187a5c"} Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.617448 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" event={"ID":"abca9c47-e52b-4410-83e1-b486f8f01aca","Type":"ContainerStarted","Data":"3809342bf67fd621fbeb0c0a92c72f0904ee3d0994b143784f641de8ccbd319f"} Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.641472 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-djddj" event={"ID":"24e4585c-4850-4101-a40d-b38424860805","Type":"ContainerStarted","Data":"45dbda81e7c894f1e74942ec74a804137d109018f534fdd7ac71317ab63d7c66"} Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.676207 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-djddj" podStartSLOduration=11.676189332 podStartE2EDuration="11.676189332s" podCreationTimestamp="2025-12-05 17:06:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:49.669119464 +0000 UTC m=+148.172226470" watchObservedRunningTime="2025-12-05 17:06:49.676189332 +0000 UTC m=+148.179296338" Dec 05 17:06:49 crc kubenswrapper[4753]: I1205 17:06:49.742019 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 05 17:06:49 crc kubenswrapper[4753]: W1205 17:06:49.941446 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-27399f5fd5a599e954217e8ba7317a88142652042a6e7a93ba5784415e02ab31 WatchSource:0}: Error finding container 27399f5fd5a599e954217e8ba7317a88142652042a6e7a93ba5784415e02ab31: Status 404 returned error can't find the container with id 27399f5fd5a599e954217e8ba7317a88142652042a6e7a93ba5784415e02ab31 Dec 05 17:06:49 crc kubenswrapper[4753]: W1205 17:06:49.942159 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-f768b46c65eaee621d56251dc12fbe05bf2c854164901ff3611b42922b9291e0 WatchSource:0}: Error finding container f768b46c65eaee621d56251dc12fbe05bf2c854164901ff3611b42922b9291e0: Status 404 returned error can't find the container with id f768b46c65eaee621d56251dc12fbe05bf2c854164901ff3611b42922b9291e0 Dec 05 17:06:50 crc kubenswrapper[4753]: W1205 17:06:50.063986 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-bdcc92dc3b3d14cbd780582742e111dcacf930d74d4f15c7e0217b9a61e416b6 WatchSource:0}: Error finding container 
bdcc92dc3b3d14cbd780582742e111dcacf930d74d4f15c7e0217b9a61e416b6: Status 404 returned error can't find the container with id bdcc92dc3b3d14cbd780582742e111dcacf930d74d4f15c7e0217b9a61e416b6 Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.075075 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-c4kqm"] Dec 05 17:06:50 crc kubenswrapper[4753]: E1205 17:06:50.075614 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c18256b2-4287-4df0-8819-201cf14c6380" containerName="collect-profiles" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.075630 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c18256b2-4287-4df0-8819-201cf14c6380" containerName="collect-profiles" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.075745 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c18256b2-4287-4df0-8819-201cf14c6380" containerName="collect-profiles" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.076626 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.079229 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.084860 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4kqm"] Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.086638 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:50 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:50 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:50 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.086701 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.183602 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-catalog-content\") pod \"redhat-marketplace-c4kqm\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.183644 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-utilities\") pod \"redhat-marketplace-c4kqm\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.183683 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v49dc\" (UniqueName: \"kubernetes.io/projected/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-kube-api-access-v49dc\") pod \"redhat-marketplace-c4kqm\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " 
pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.284883 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-catalog-content\") pod \"redhat-marketplace-c4kqm\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.285316 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-utilities\") pod \"redhat-marketplace-c4kqm\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.285365 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v49dc\" (UniqueName: \"kubernetes.io/projected/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-kube-api-access-v49dc\") pod \"redhat-marketplace-c4kqm\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.285439 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-catalog-content\") pod \"redhat-marketplace-c4kqm\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.285806 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-utilities\") pod \"redhat-marketplace-c4kqm\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.304310 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v49dc\" (UniqueName: \"kubernetes.io/projected/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-kube-api-access-v49dc\") pod \"redhat-marketplace-c4kqm\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.343104 4753 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nb5t container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.343179 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2nb5t" podUID="f119bafa-ed3a-42d2-876f-c63999b216e1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.343310 4753 patch_prober.go:28] interesting pod/downloads-7954f5f757-2nb5t container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.343456 4753 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-console/downloads-7954f5f757-2nb5t" podUID="f119bafa-ed3a-42d2-876f-c63999b216e1" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.454026 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.482937 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-64x2r"] Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.483866 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.519533 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-64x2r"] Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.548445 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.548481 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.573241 4753 patch_prober.go:28] interesting pod/apiserver-76f77b778f-rkmxh container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]log ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]etcd ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/generic-apiserver-start-informers ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/max-in-flight-filter ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 05 17:06:50 crc kubenswrapper[4753]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 05 17:06:50 crc kubenswrapper[4753]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/project.openshift.io-projectcache ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/openshift.io-startinformers ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 05 17:06:50 crc kubenswrapper[4753]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 05 17:06:50 crc kubenswrapper[4753]: livez check failed Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.573339 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" podUID="782feb3b-5fe4-413e-87a0-9602f412897e" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.582549 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 
17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.582589 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.593830 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqw9m\" (UniqueName: \"kubernetes.io/projected/c1873906-73e5-4c22-b547-233eb99f1562-kube-api-access-wqw9m\") pod \"redhat-marketplace-64x2r\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.593902 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-catalog-content\") pod \"redhat-marketplace-64x2r\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.593934 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-utilities\") pod \"redhat-marketplace-64x2r\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.614372 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.649396 4753 generic.go:334] "Generic (PLEG): container finished" podID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerID="c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c" exitCode=0 Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.649446 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cb4ng" event={"ID":"db1dcb29-b751-4a77-8f2e-68efadf955b9","Type":"ContainerDied","Data":"c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.651383 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.670724 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"9ade9a090e974be72486d0bcc64425b12a464a1a5c18454866607db8a10633db"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.671182 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"bdcc92dc3b3d14cbd780582742e111dcacf930d74d4f15c7e0217b9a61e416b6"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.685094 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"7d0f9da4a26c9e3c628f1d76ea6ae253d8578f00e2b6e72aefd2fb5e00892da8"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.685139 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"f768b46c65eaee621d56251dc12fbe05bf2c854164901ff3611b42922b9291e0"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.685967 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.686259 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.688069 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" event={"ID":"abca9c47-e52b-4410-83e1-b486f8f01aca","Type":"ContainerStarted","Data":"b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.688461 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.693412 4753 patch_prober.go:28] interesting pod/console-f9d7485db-9c622 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.693482 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-9c622" podUID="48038042-7b0f-48d9-9f90-6c0b9dd179d6" containerName="console" probeResult="failure" output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.698012 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-utilities\") pod \"redhat-marketplace-64x2r\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.698193 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqw9m\" (UniqueName: \"kubernetes.io/projected/c1873906-73e5-4c22-b547-233eb99f1562-kube-api-access-wqw9m\") pod \"redhat-marketplace-64x2r\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.698254 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-catalog-content\") pod \"redhat-marketplace-64x2r\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.698685 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-catalog-content\") pod \"redhat-marketplace-64x2r\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.700305 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-utilities\") pod \"redhat-marketplace-64x2r\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.702358 4753 generic.go:334] "Generic (PLEG): container finished" podID="5560048a-c9e9-4743-9573-1b58a2240c29" containerID="be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c" exitCode=0 Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.702495 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgpzc" event={"ID":"5560048a-c9e9-4743-9573-1b58a2240c29","Type":"ContainerDied","Data":"be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.702536 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgpzc" event={"ID":"5560048a-c9e9-4743-9573-1b58a2240c29","Type":"ContainerStarted","Data":"09458331ac649eca958921cb2fd470111657af9ec584895fcfc99390f4803626"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.718870 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"13f88f27f192f0d50c5c16bd306169d1b5ffb907831f03cd2405e67efea45a2f"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.719027 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"27399f5fd5a599e954217e8ba7317a88142652042a6e7a93ba5784415e02ab31"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.719656 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.720979 4753 generic.go:334] "Generic (PLEG): container finished" podID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerID="d50b5204633464a4d571bd81e8c24206e87b72f9e82d515bb49df7e51bc371ba" exitCode=0 Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.721060 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2qjv" event={"ID":"07c18cf7-d24d-46d2-90c1-0ef186b1c434","Type":"ContainerDied","Data":"d50b5204633464a4d571bd81e8c24206e87b72f9e82d515bb49df7e51bc371ba"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.721079 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2qjv" event={"ID":"07c18cf7-d24d-46d2-90c1-0ef186b1c434","Type":"ContainerStarted","Data":"498064a4a81201cbb5ec54c37d4703ba1ae3303800eb2bf9a24f4d1222ac4d33"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.730484 4753 generic.go:334] "Generic (PLEG): container finished" podID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerID="29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54" exitCode=0 Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.730600 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qt9vk" event={"ID":"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9","Type":"ContainerDied","Data":"29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.730695 4753 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/community-operators-qt9vk" event={"ID":"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9","Type":"ContainerStarted","Data":"0ad1763cb3f495581aaf2e6969b87aebbb92d87bf8d65c4a989126bd4c81479c"} Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.734538 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqw9m\" (UniqueName: \"kubernetes.io/projected/c1873906-73e5-4c22-b547-233eb99f1562-kube-api-access-wqw9m\") pod \"redhat-marketplace-64x2r\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.747516 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-zdzdl" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.785243 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" podStartSLOduration=128.785213635 podStartE2EDuration="2m8.785213635s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:50.781787634 +0000 UTC m=+149.284894650" watchObservedRunningTime="2025-12-05 17:06:50.785213635 +0000 UTC m=+149.288320641" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.863518 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.884600 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.902501 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6ggrp"] Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.903850 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.906654 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.909943 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6ggrp"] Dec 05 17:06:50 crc kubenswrapper[4753]: I1205 17:06:50.914491 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4kqm"] Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.009514 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqs4q\" (UniqueName: \"kubernetes.io/projected/36b2ef01-2c7a-4313-a03a-be77a660d987-kube-api-access-fqs4q\") pod \"redhat-operators-6ggrp\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.009607 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-utilities\") pod \"redhat-operators-6ggrp\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.009727 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-catalog-content\") pod \"redhat-operators-6ggrp\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.080913 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.085843 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:51 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:51 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:51 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.086207 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.089313 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wrrx9"] Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.092177 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.111614 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wrrx9"] Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.111665 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-catalog-content\") pod \"redhat-operators-6ggrp\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.111704 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqs4q\" (UniqueName: \"kubernetes.io/projected/36b2ef01-2c7a-4313-a03a-be77a660d987-kube-api-access-fqs4q\") pod \"redhat-operators-6ggrp\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.111765 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-utilities\") pod \"redhat-operators-wrrx9\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.111782 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-utilities\") pod \"redhat-operators-6ggrp\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.111827 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxxbn\" (UniqueName: \"kubernetes.io/projected/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-kube-api-access-dxxbn\") pod \"redhat-operators-wrrx9\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.111843 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-catalog-content\") pod \"redhat-operators-wrrx9\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.112452 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-catalog-content\") pod \"redhat-operators-6ggrp\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.112701 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-utilities\") pod \"redhat-operators-6ggrp\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.152915 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fqs4q\" (UniqueName: \"kubernetes.io/projected/36b2ef01-2c7a-4313-a03a-be77a660d987-kube-api-access-fqs4q\") pod \"redhat-operators-6ggrp\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.212905 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxxbn\" (UniqueName: \"kubernetes.io/projected/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-kube-api-access-dxxbn\") pod \"redhat-operators-wrrx9\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.212960 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-catalog-content\") pod \"redhat-operators-wrrx9\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.213036 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-utilities\") pod \"redhat-operators-wrrx9\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.214062 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-utilities\") pod \"redhat-operators-wrrx9\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.214657 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-catalog-content\") pod \"redhat-operators-wrrx9\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.244318 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.262038 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-64x2r"] Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.271668 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxxbn\" (UniqueName: \"kubernetes.io/projected/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-kube-api-access-dxxbn\") pod \"redhat-operators-wrrx9\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.456498 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.511109 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6ggrp"] Dec 05 17:06:51 crc kubenswrapper[4753]: W1205 17:06:51.521341 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod36b2ef01_2c7a_4313_a03a_be77a660d987.slice/crio-fa0e729857e34deff1436c162cff22b0b1de2a3b947841d6ad007e5ea894f594 WatchSource:0}: Error finding container fa0e729857e34deff1436c162cff22b0b1de2a3b947841d6ad007e5ea894f594: Status 404 returned error can't find the container with id fa0e729857e34deff1436c162cff22b0b1de2a3b947841d6ad007e5ea894f594 Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.746251 4753 generic.go:334] "Generic (PLEG): container finished" podID="c1873906-73e5-4c22-b547-233eb99f1562" containerID="8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5" exitCode=0 Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.746539 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64x2r" event={"ID":"c1873906-73e5-4c22-b547-233eb99f1562","Type":"ContainerDied","Data":"8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5"} Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.746716 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64x2r" event={"ID":"c1873906-73e5-4c22-b547-233eb99f1562","Type":"ContainerStarted","Data":"dc75d39c3110787140772965b42a2a67699a11602526d4283c1bbe2aef09ee5a"} Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.749470 4753 generic.go:334] "Generic (PLEG): container finished" podID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerID="2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551" exitCode=0 Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.749553 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4kqm" event={"ID":"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b","Type":"ContainerDied","Data":"2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551"} Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.749590 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4kqm" event={"ID":"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b","Type":"ContainerStarted","Data":"c0625d794b7997243e6de75dbfbd2ec5763c1d9b21a8af3ff0dc318ffb1bbf6a"} Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.768358 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ggrp" event={"ID":"36b2ef01-2c7a-4313-a03a-be77a660d987","Type":"ContainerStarted","Data":"fa0e729857e34deff1436c162cff22b0b1de2a3b947841d6ad007e5ea894f594"} Dec 05 17:06:51 crc kubenswrapper[4753]: I1205 17:06:51.780195 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wrrx9"] Dec 05 17:06:52 crc kubenswrapper[4753]: I1205 17:06:52.085538 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:52 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:52 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:52 crc 
kubenswrapper[4753]: healthz check failed Dec 05 17:06:52 crc kubenswrapper[4753]: I1205 17:06:52.085652 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:52 crc kubenswrapper[4753]: I1205 17:06:52.797043 4753 generic.go:334] "Generic (PLEG): container finished" podID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerID="cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3" exitCode=0 Dec 05 17:06:52 crc kubenswrapper[4753]: I1205 17:06:52.797156 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ggrp" event={"ID":"36b2ef01-2c7a-4313-a03a-be77a660d987","Type":"ContainerDied","Data":"cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3"} Dec 05 17:06:52 crc kubenswrapper[4753]: I1205 17:06:52.802479 4753 generic.go:334] "Generic (PLEG): container finished" podID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerID="4e8ce317ab59521ce59449fe2ece6a7288ff88e5abb1c5505a22d9117f1d4873" exitCode=0 Dec 05 17:06:52 crc kubenswrapper[4753]: I1205 17:06:52.802566 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrx9" event={"ID":"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b","Type":"ContainerDied","Data":"4e8ce317ab59521ce59449fe2ece6a7288ff88e5abb1c5505a22d9117f1d4873"} Dec 05 17:06:52 crc kubenswrapper[4753]: I1205 17:06:52.802601 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrx9" event={"ID":"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b","Type":"ContainerStarted","Data":"ac2f70dd6372231174dbde4c01ce8347ce0fa4e5f27f9bd2debff83880124a6f"} Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.083387 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:53 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:53 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:53 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.083456 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.172245 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.174671 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.176951 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.177287 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.180074 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.265965 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.266049 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.266998 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.267902 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.274686 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.276620 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.280397 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.367663 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.367736 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f92a34e2-d0fe-485d-bded-768c62119421-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f92a34e2-d0fe-485d-bded-768c62119421\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.367790 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\") " 
pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.367810 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f92a34e2-d0fe-485d-bded-768c62119421-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f92a34e2-d0fe-485d-bded-768c62119421\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.367921 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.402062 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.469189 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f92a34e2-d0fe-485d-bded-768c62119421-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f92a34e2-d0fe-485d-bded-768c62119421\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.469239 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f92a34e2-d0fe-485d-bded-768c62119421-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f92a34e2-d0fe-485d-bded-768c62119421\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.469604 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f92a34e2-d0fe-485d-bded-768c62119421-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f92a34e2-d0fe-485d-bded-768c62119421\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.491125 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f92a34e2-d0fe-485d-bded-768c62119421-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f92a34e2-d0fe-485d-bded-768c62119421\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.512713 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.604752 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:06:53 crc kubenswrapper[4753]: I1205 17:06:53.938884 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 17:06:53 crc kubenswrapper[4753]: W1205 17:06:53.967580 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podf92a34e2_d0fe_485d_bded_768c62119421.slice/crio-763269ecaf487301acce0e6471e69570b7f65231b521c239c2f890d4f1d43b24 WatchSource:0}: Error finding container 763269ecaf487301acce0e6471e69570b7f65231b521c239c2f890d4f1d43b24: Status 404 returned error can't find the container with id 763269ecaf487301acce0e6471e69570b7f65231b521c239c2f890d4f1d43b24 Dec 05 17:06:54 crc kubenswrapper[4753]: I1205 17:06:54.050367 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 17:06:54 crc kubenswrapper[4753]: W1205 17:06:54.072333 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poddcdf06e2_cc7a_4a33_a08f_61d234adf47b.slice/crio-98e4efa9ae8856dedd9c599488e8e30f597247d804bee3972553f702d11c8d12 WatchSource:0}: Error finding container 98e4efa9ae8856dedd9c599488e8e30f597247d804bee3972553f702d11c8d12: Status 404 returned error can't find the container with id 98e4efa9ae8856dedd9c599488e8e30f597247d804bee3972553f702d11c8d12 Dec 05 17:06:54 crc kubenswrapper[4753]: I1205 17:06:54.084478 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:54 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:54 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:54 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:54 crc kubenswrapper[4753]: I1205 17:06:54.084521 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:54 crc kubenswrapper[4753]: I1205 17:06:54.858736 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dcdf06e2-cc7a-4a33-a08f-61d234adf47b","Type":"ContainerStarted","Data":"98e4efa9ae8856dedd9c599488e8e30f597247d804bee3972553f702d11c8d12"} Dec 05 17:06:54 crc kubenswrapper[4753]: I1205 17:06:54.863236 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f92a34e2-d0fe-485d-bded-768c62119421","Type":"ContainerStarted","Data":"763269ecaf487301acce0e6471e69570b7f65231b521c239c2f890d4f1d43b24"} Dec 05 17:06:55 crc kubenswrapper[4753]: I1205 17:06:55.083603 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:55 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:55 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:55 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:55 crc kubenswrapper[4753]: I1205 17:06:55.083681 4753 prober.go:107] "Probe failed" 
probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:55 crc kubenswrapper[4753]: I1205 17:06:55.556347 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:55 crc kubenswrapper[4753]: I1205 17:06:55.562867 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-rkmxh" Dec 05 17:06:55 crc kubenswrapper[4753]: I1205 17:06:55.658097 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-xhb8z" Dec 05 17:06:55 crc kubenswrapper[4753]: I1205 17:06:55.927438 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dcdf06e2-cc7a-4a33-a08f-61d234adf47b","Type":"ContainerStarted","Data":"7fa759459d12d227310f4b7090872e112dcc4d7858c42de733dcff54ecf17f80"} Dec 05 17:06:55 crc kubenswrapper[4753]: I1205 17:06:55.933936 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f92a34e2-d0fe-485d-bded-768c62119421","Type":"ContainerStarted","Data":"3bb00b7517f77bbedbdc93db2097b900db756402724a4717108b33949a7391dd"} Dec 05 17:06:55 crc kubenswrapper[4753]: I1205 17:06:55.978927 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.978909412 podStartE2EDuration="2.978909412s" podCreationTimestamp="2025-12-05 17:06:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:55.954063899 +0000 UTC m=+154.457170905" watchObservedRunningTime="2025-12-05 17:06:55.978909412 +0000 UTC m=+154.482016418" Dec 05 17:06:55 crc kubenswrapper[4753]: I1205 17:06:55.992277 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.992253506 podStartE2EDuration="2.992253506s" podCreationTimestamp="2025-12-05 17:06:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:06:55.977697176 +0000 UTC m=+154.480804182" watchObservedRunningTime="2025-12-05 17:06:55.992253506 +0000 UTC m=+154.495360512" Dec 05 17:06:56 crc kubenswrapper[4753]: I1205 17:06:56.083067 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:56 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:56 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:56 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:56 crc kubenswrapper[4753]: I1205 17:06:56.083180 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:56 crc kubenswrapper[4753]: I1205 17:06:56.940334 4753 generic.go:334] "Generic (PLEG): container finished" 
podID="f92a34e2-d0fe-485d-bded-768c62119421" containerID="3bb00b7517f77bbedbdc93db2097b900db756402724a4717108b33949a7391dd" exitCode=0 Dec 05 17:06:56 crc kubenswrapper[4753]: I1205 17:06:56.940385 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f92a34e2-d0fe-485d-bded-768c62119421","Type":"ContainerDied","Data":"3bb00b7517f77bbedbdc93db2097b900db756402724a4717108b33949a7391dd"} Dec 05 17:06:56 crc kubenswrapper[4753]: I1205 17:06:56.943458 4753 generic.go:334] "Generic (PLEG): container finished" podID="dcdf06e2-cc7a-4a33-a08f-61d234adf47b" containerID="7fa759459d12d227310f4b7090872e112dcc4d7858c42de733dcff54ecf17f80" exitCode=0 Dec 05 17:06:56 crc kubenswrapper[4753]: I1205 17:06:56.943485 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dcdf06e2-cc7a-4a33-a08f-61d234adf47b","Type":"ContainerDied","Data":"7fa759459d12d227310f4b7090872e112dcc4d7858c42de733dcff54ecf17f80"} Dec 05 17:06:57 crc kubenswrapper[4753]: I1205 17:06:57.083206 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:57 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:57 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:57 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:57 crc kubenswrapper[4753]: I1205 17:06:57.083857 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:58 crc kubenswrapper[4753]: I1205 17:06:58.083580 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:58 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:58 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:58 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:58 crc kubenswrapper[4753]: I1205 17:06:58.083634 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:06:58 crc kubenswrapper[4753]: I1205 17:06:58.979717 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:06:58 crc kubenswrapper[4753]: I1205 17:06:58.980124 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:06:59 crc kubenswrapper[4753]: I1205 17:06:59.081828 4753 patch_prober.go:28] interesting 
pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:06:59 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:06:59 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:06:59 crc kubenswrapper[4753]: healthz check failed Dec 05 17:06:59 crc kubenswrapper[4753]: I1205 17:06:59.081912 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:07:00 crc kubenswrapper[4753]: I1205 17:07:00.083062 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:07:00 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:07:00 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:07:00 crc kubenswrapper[4753]: healthz check failed Dec 05 17:07:00 crc kubenswrapper[4753]: I1205 17:07:00.083206 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:07:00 crc kubenswrapper[4753]: I1205 17:07:00.354633 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-2nb5t" Dec 05 17:07:00 crc kubenswrapper[4753]: I1205 17:07:00.680312 4753 patch_prober.go:28] interesting pod/console-f9d7485db-9c622 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Dec 05 17:07:00 crc kubenswrapper[4753]: I1205 17:07:00.680837 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-9c622" podUID="48038042-7b0f-48d9-9f90-6c0b9dd179d6" containerName="console" probeResult="failure" output="Get \"https://10.217.0.17:8443/health\": dial tcp 10.217.0.17:8443: connect: connection refused" Dec 05 17:07:01 crc kubenswrapper[4753]: I1205 17:07:01.082318 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:07:01 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:07:01 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:07:01 crc kubenswrapper[4753]: healthz check failed Dec 05 17:07:01 crc kubenswrapper[4753]: I1205 17:07:01.082395 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.083996 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http 
failed: reason withheld Dec 05 17:07:02 crc kubenswrapper[4753]: [-]has-synced failed: reason withheld Dec 05 17:07:02 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:07:02 crc kubenswrapper[4753]: healthz check failed Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.084084 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.322480 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.362347 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f92a34e2-d0fe-485d-bded-768c62119421-kubelet-dir\") pod \"f92a34e2-d0fe-485d-bded-768c62119421\" (UID: \"f92a34e2-d0fe-485d-bded-768c62119421\") " Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.362473 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f92a34e2-d0fe-485d-bded-768c62119421-kube-api-access\") pod \"f92a34e2-d0fe-485d-bded-768c62119421\" (UID: \"f92a34e2-d0fe-485d-bded-768c62119421\") " Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.362691 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f92a34e2-d0fe-485d-bded-768c62119421-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f92a34e2-d0fe-485d-bded-768c62119421" (UID: "f92a34e2-d0fe-485d-bded-768c62119421"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.362978 4753 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f92a34e2-d0fe-485d-bded-768c62119421-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.369063 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f92a34e2-d0fe-485d-bded-768c62119421-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f92a34e2-d0fe-485d-bded-768c62119421" (UID: "f92a34e2-d0fe-485d-bded-768c62119421"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.463581 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f92a34e2-d0fe-485d-bded-768c62119421-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.998292 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f92a34e2-d0fe-485d-bded-768c62119421","Type":"ContainerDied","Data":"763269ecaf487301acce0e6471e69570b7f65231b521c239c2f890d4f1d43b24"} Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.998556 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="763269ecaf487301acce0e6471e69570b7f65231b521c239c2f890d4f1d43b24" Dec 05 17:07:02 crc kubenswrapper[4753]: I1205 17:07:02.998440 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:07:03 crc kubenswrapper[4753]: I1205 17:07:03.082744 4753 patch_prober.go:28] interesting pod/router-default-5444994796-9tzjp container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:07:03 crc kubenswrapper[4753]: [+]has-synced ok Dec 05 17:07:03 crc kubenswrapper[4753]: [+]process-running ok Dec 05 17:07:03 crc kubenswrapper[4753]: healthz check failed Dec 05 17:07:03 crc kubenswrapper[4753]: I1205 17:07:03.082837 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9tzjp" podUID="de65df6c-3a9a-4041-a67c-e6cbd766b4b2" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:07:03 crc kubenswrapper[4753]: I1205 17:07:03.590363 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:07:03 crc kubenswrapper[4753]: I1205 17:07:03.680316 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kube-api-access\") pod \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\" (UID: \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\") " Dec 05 17:07:03 crc kubenswrapper[4753]: I1205 17:07:03.680439 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kubelet-dir\") pod \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\" (UID: \"dcdf06e2-cc7a-4a33-a08f-61d234adf47b\") " Dec 05 17:07:03 crc kubenswrapper[4753]: I1205 17:07:03.680579 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "dcdf06e2-cc7a-4a33-a08f-61d234adf47b" (UID: "dcdf06e2-cc7a-4a33-a08f-61d234adf47b"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:07:03 crc kubenswrapper[4753]: I1205 17:07:03.680938 4753 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:03 crc kubenswrapper[4753]: I1205 17:07:03.690479 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "dcdf06e2-cc7a-4a33-a08f-61d234adf47b" (UID: "dcdf06e2-cc7a-4a33-a08f-61d234adf47b"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:07:03 crc kubenswrapper[4753]: I1205 17:07:03.781883 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/dcdf06e2-cc7a-4a33-a08f-61d234adf47b-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:04 crc kubenswrapper[4753]: I1205 17:07:04.009857 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"dcdf06e2-cc7a-4a33-a08f-61d234adf47b","Type":"ContainerDied","Data":"98e4efa9ae8856dedd9c599488e8e30f597247d804bee3972553f702d11c8d12"} Dec 05 17:07:04 crc kubenswrapper[4753]: I1205 17:07:04.009912 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="98e4efa9ae8856dedd9c599488e8e30f597247d804bee3972553f702d11c8d12" Dec 05 17:07:04 crc kubenswrapper[4753]: I1205 17:07:04.009918 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:07:04 crc kubenswrapper[4753]: I1205 17:07:04.085318 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:07:04 crc kubenswrapper[4753]: I1205 17:07:04.087698 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-9tzjp" Dec 05 17:07:05 crc kubenswrapper[4753]: I1205 17:07:05.204979 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:07:05 crc kubenswrapper[4753]: I1205 17:07:05.223937 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/00ab636b-9cc9-4a6f-8e6e-6442b35280ca-metrics-certs\") pod \"network-metrics-daemon-jjgfd\" (UID: \"00ab636b-9cc9-4a6f-8e6e-6442b35280ca\") " pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:07:05 crc kubenswrapper[4753]: I1205 17:07:05.442396 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-jjgfd" Dec 05 17:07:08 crc kubenswrapper[4753]: I1205 17:07:08.505902 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:07:10 crc kubenswrapper[4753]: I1205 17:07:10.687695 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:07:10 crc kubenswrapper[4753]: I1205 17:07:10.692483 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:07:20 crc kubenswrapper[4753]: E1205 17:07:20.827792 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 17:07:20 crc kubenswrapper[4753]: E1205 17:07:20.828523 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5nw7t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-dgpzc_openshift-marketplace(5560048a-c9e9-4743-9573-1b58a2240c29): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:07:20 crc kubenswrapper[4753]: E1205 17:07:20.829713 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-dgpzc" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" Dec 05 17:07:20 crc kubenswrapper[4753]: I1205 17:07:20.882745 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mjbr2" Dec 05 17:07:24 crc kubenswrapper[4753]: E1205 
17:07:24.948850 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-dgpzc" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" Dec 05 17:07:25 crc kubenswrapper[4753]: E1205 17:07:25.044896 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 17:07:25 crc kubenswrapper[4753]: E1205 17:07:25.044980 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 17:07:25 crc kubenswrapper[4753]: E1205 17:07:25.045750 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g5zgt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-qt9vk_openshift-marketplace(9a495df2-eeed-4f2a-b492-cf5aaaaab8f9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:07:25 crc kubenswrapper[4753]: E1205 17:07:25.045821 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xnxg8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-cb4ng_openshift-marketplace(db1dcb29-b751-4a77-8f2e-68efadf955b9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:07:25 crc kubenswrapper[4753]: E1205 17:07:25.047382 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-cb4ng" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" Dec 05 17:07:25 crc kubenswrapper[4753]: E1205 17:07:25.047468 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-qt9vk" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" Dec 05 17:07:26 crc kubenswrapper[4753]: E1205 17:07:26.183227 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-qt9vk" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" Dec 05 17:07:26 crc kubenswrapper[4753]: E1205 17:07:26.183968 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-cb4ng" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" Dec 05 17:07:26 crc kubenswrapper[4753]: E1205 17:07:26.250844 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 17:07:26 crc kubenswrapper[4753]: E1205 17:07:26.251204 4753 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wqw9m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-64x2r_openshift-marketplace(c1873906-73e5-4c22-b547-233eb99f1562): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:07:26 crc kubenswrapper[4753]: E1205 17:07:26.252470 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-64x2r" podUID="c1873906-73e5-4c22-b547-233eb99f1562" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.563131 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 17:07:26 crc kubenswrapper[4753]: E1205 17:07:26.563378 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcdf06e2-cc7a-4a33-a08f-61d234adf47b" containerName="pruner" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.563393 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcdf06e2-cc7a-4a33-a08f-61d234adf47b" containerName="pruner" Dec 05 17:07:26 crc kubenswrapper[4753]: E1205 17:07:26.563408 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f92a34e2-d0fe-485d-bded-768c62119421" containerName="pruner" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.563416 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f92a34e2-d0fe-485d-bded-768c62119421" containerName="pruner" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.563515 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcdf06e2-cc7a-4a33-a08f-61d234adf47b" containerName="pruner" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.563535 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f92a34e2-d0fe-485d-bded-768c62119421" 
containerName="pruner" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.564038 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.568916 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.569287 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.574527 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.580252 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d573bc3-94ca-449b-80f0-0baf670e2392-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6d573bc3-94ca-449b-80f0-0baf670e2392\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.580362 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d573bc3-94ca-449b-80f0-0baf670e2392-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6d573bc3-94ca-449b-80f0-0baf670e2392\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.681663 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d573bc3-94ca-449b-80f0-0baf670e2392-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6d573bc3-94ca-449b-80f0-0baf670e2392\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.681797 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d573bc3-94ca-449b-80f0-0baf670e2392-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6d573bc3-94ca-449b-80f0-0baf670e2392\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.681892 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d573bc3-94ca-449b-80f0-0baf670e2392-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"6d573bc3-94ca-449b-80f0-0baf670e2392\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.703511 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d573bc3-94ca-449b-80f0-0baf670e2392-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"6d573bc3-94ca-449b-80f0-0baf670e2392\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:26 crc kubenswrapper[4753]: I1205 17:07:26.917578 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:28 crc kubenswrapper[4753]: I1205 17:07:28.979123 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:07:28 crc kubenswrapper[4753]: I1205 17:07:28.979944 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:07:29 crc kubenswrapper[4753]: I1205 17:07:29.044010 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:07:29 crc kubenswrapper[4753]: E1205 17:07:29.300288 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-64x2r" podUID="c1873906-73e5-4c22-b547-233eb99f1562" Dec 05 17:07:29 crc kubenswrapper[4753]: E1205 17:07:29.425543 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 17:07:29 crc kubenswrapper[4753]: E1205 17:07:29.426129 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v49dc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-c4kqm_openshift-marketplace(ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b): ErrImagePull: rpc error: code = Canceled desc = copying system 
image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:07:29 crc kubenswrapper[4753]: E1205 17:07:29.426921 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 17:07:29 crc kubenswrapper[4753]: E1205 17:07:29.427095 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fqs4q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-6ggrp_openshift-marketplace(36b2ef01-2c7a-4313-a03a-be77a660d987): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:07:29 crc kubenswrapper[4753]: E1205 17:07:29.433105 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-c4kqm" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" Dec 05 17:07:29 crc kubenswrapper[4753]: E1205 17:07:29.433091 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-6ggrp" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" Dec 05 17:07:29 crc kubenswrapper[4753]: I1205 17:07:29.864826 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-jjgfd"] Dec 05 17:07:29 crc kubenswrapper[4753]: I1205 17:07:29.909405 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 17:07:29 crc kubenswrapper[4753]: W1205 17:07:29.911309 4753 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-pod6d573bc3_94ca_449b_80f0_0baf670e2392.slice/crio-f3767233b9b4a18712e0743aa35f677f344db2944213d5a7e2a2084d80f48cfb WatchSource:0}: Error finding container f3767233b9b4a18712e0743aa35f677f344db2944213d5a7e2a2084d80f48cfb: Status 404 returned error can't find the container with id f3767233b9b4a18712e0743aa35f677f344db2944213d5a7e2a2084d80f48cfb Dec 05 17:07:29 crc kubenswrapper[4753]: W1205 17:07:29.911788 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod00ab636b_9cc9_4a6f_8e6e_6442b35280ca.slice/crio-a20d72d734594e977946f653b2cf273526d20fe7950d64be9c433e95c96ff342 WatchSource:0}: Error finding container a20d72d734594e977946f653b2cf273526d20fe7950d64be9c433e95c96ff342: Status 404 returned error can't find the container with id a20d72d734594e977946f653b2cf273526d20fe7950d64be9c433e95c96ff342 Dec 05 17:07:30 crc kubenswrapper[4753]: I1205 17:07:30.167426 4753 generic.go:334] "Generic (PLEG): container finished" podID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerID="9f797544f347f56705c03d4fdff050d55310e5d32cee991eeb6106f8c715a883" exitCode=0 Dec 05 17:07:30 crc kubenswrapper[4753]: I1205 17:07:30.167531 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2qjv" event={"ID":"07c18cf7-d24d-46d2-90c1-0ef186b1c434","Type":"ContainerDied","Data":"9f797544f347f56705c03d4fdff050d55310e5d32cee991eeb6106f8c715a883"} Dec 05 17:07:30 crc kubenswrapper[4753]: I1205 17:07:30.170231 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"6d573bc3-94ca-449b-80f0-0baf670e2392","Type":"ContainerStarted","Data":"f3767233b9b4a18712e0743aa35f677f344db2944213d5a7e2a2084d80f48cfb"} Dec 05 17:07:30 crc kubenswrapper[4753]: I1205 17:07:30.174811 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" event={"ID":"00ab636b-9cc9-4a6f-8e6e-6442b35280ca","Type":"ContainerStarted","Data":"a20d72d734594e977946f653b2cf273526d20fe7950d64be9c433e95c96ff342"} Dec 05 17:07:30 crc kubenswrapper[4753]: I1205 17:07:30.178605 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrx9" event={"ID":"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b","Type":"ContainerStarted","Data":"880d3b3533f1ad143c6a991403465225cc26e4a9c30a34883a1e8f7a990cf62e"} Dec 05 17:07:30 crc kubenswrapper[4753]: E1205 17:07:30.179837 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-c4kqm" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" Dec 05 17:07:30 crc kubenswrapper[4753]: E1205 17:07:30.185260 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-6ggrp" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" Dec 05 17:07:31 crc kubenswrapper[4753]: I1205 17:07:31.201523 4753 generic.go:334] "Generic (PLEG): container finished" podID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerID="880d3b3533f1ad143c6a991403465225cc26e4a9c30a34883a1e8f7a990cf62e" exitCode=0 Dec 05 17:07:31 crc 
kubenswrapper[4753]: I1205 17:07:31.201739 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrx9" event={"ID":"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b","Type":"ContainerDied","Data":"880d3b3533f1ad143c6a991403465225cc26e4a9c30a34883a1e8f7a990cf62e"} Dec 05 17:07:31 crc kubenswrapper[4753]: I1205 17:07:31.209795 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2qjv" event={"ID":"07c18cf7-d24d-46d2-90c1-0ef186b1c434","Type":"ContainerStarted","Data":"1a2129e0def20a2aa85de9f9ebe1a3b6d4c2427793e37e03fca5c1a477759165"} Dec 05 17:07:31 crc kubenswrapper[4753]: I1205 17:07:31.211959 4753 generic.go:334] "Generic (PLEG): container finished" podID="6d573bc3-94ca-449b-80f0-0baf670e2392" containerID="ee5c2ee283a2cc1f95f7843f9ea7a999a9554bb200e0eec65492c423458154cc" exitCode=0 Dec 05 17:07:31 crc kubenswrapper[4753]: I1205 17:07:31.212124 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"6d573bc3-94ca-449b-80f0-0baf670e2392","Type":"ContainerDied","Data":"ee5c2ee283a2cc1f95f7843f9ea7a999a9554bb200e0eec65492c423458154cc"} Dec 05 17:07:31 crc kubenswrapper[4753]: I1205 17:07:31.213937 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" event={"ID":"00ab636b-9cc9-4a6f-8e6e-6442b35280ca","Type":"ContainerStarted","Data":"a59491a1ab2ae64b984cf2328f9a6eb471bbf29dcd2a63d3a6e669ec09477792"} Dec 05 17:07:31 crc kubenswrapper[4753]: I1205 17:07:31.213965 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-jjgfd" event={"ID":"00ab636b-9cc9-4a6f-8e6e-6442b35280ca","Type":"ContainerStarted","Data":"f96881ffcb316da860bf30ab86d57cf2f4bfe124c55488e1719a7d817592b9e7"} Dec 05 17:07:31 crc kubenswrapper[4753]: I1205 17:07:31.280731 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t2qjv" podStartSLOduration=3.332768781 podStartE2EDuration="43.280707666s" podCreationTimestamp="2025-12-05 17:06:48 +0000 UTC" firstStartedPulling="2025-12-05 17:06:50.724517605 +0000 UTC m=+149.227624611" lastFinishedPulling="2025-12-05 17:07:30.67245649 +0000 UTC m=+189.175563496" observedRunningTime="2025-12-05 17:07:31.261366126 +0000 UTC m=+189.764473142" watchObservedRunningTime="2025-12-05 17:07:31.280707666 +0000 UTC m=+189.783814662" Dec 05 17:07:31 crc kubenswrapper[4753]: I1205 17:07:31.295039 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-jjgfd" podStartSLOduration=169.295015488 podStartE2EDuration="2m49.295015488s" podCreationTimestamp="2025-12-05 17:04:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:07:31.292787312 +0000 UTC m=+189.795894318" watchObservedRunningTime="2025-12-05 17:07:31.295015488 +0000 UTC m=+189.798122494" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.156169 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.157616 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.175379 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.223045 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrx9" event={"ID":"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b","Type":"ContainerStarted","Data":"a5985d6864154ad6710fc7a09870cbd92c7165c1f76a8ca26a77ad8b000be849"} Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.249871 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wrrx9" podStartSLOduration=2.203890025 podStartE2EDuration="41.249832942s" podCreationTimestamp="2025-12-05 17:06:51 +0000 UTC" firstStartedPulling="2025-12-05 17:06:52.805018263 +0000 UTC m=+151.308125269" lastFinishedPulling="2025-12-05 17:07:31.85096118 +0000 UTC m=+190.354068186" observedRunningTime="2025-12-05 17:07:32.24433541 +0000 UTC m=+190.747442416" watchObservedRunningTime="2025-12-05 17:07:32.249832942 +0000 UTC m=+190.752939958" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.269879 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-var-lock\") pod \"installer-9-crc\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.269947 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.269989 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kube-api-access\") pod \"installer-9-crc\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.371895 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-var-lock\") pod \"installer-9-crc\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.372378 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.372547 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kube-api-access\") pod \"installer-9-crc\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.372634 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.372550 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-var-lock\") pod \"installer-9-crc\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.409509 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kube-api-access\") pod \"installer-9-crc\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.474742 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.558643 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.677498 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d573bc3-94ca-449b-80f0-0baf670e2392-kubelet-dir\") pod \"6d573bc3-94ca-449b-80f0-0baf670e2392\" (UID: \"6d573bc3-94ca-449b-80f0-0baf670e2392\") " Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.677572 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d573bc3-94ca-449b-80f0-0baf670e2392-kube-api-access\") pod \"6d573bc3-94ca-449b-80f0-0baf670e2392\" (UID: \"6d573bc3-94ca-449b-80f0-0baf670e2392\") " Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.677900 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6d573bc3-94ca-449b-80f0-0baf670e2392-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "6d573bc3-94ca-449b-80f0-0baf670e2392" (UID: "6d573bc3-94ca-449b-80f0-0baf670e2392"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.684196 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d573bc3-94ca-449b-80f0-0baf670e2392-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "6d573bc3-94ca-449b-80f0-0baf670e2392" (UID: "6d573bc3-94ca-449b-80f0-0baf670e2392"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.778906 4753 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/6d573bc3-94ca-449b-80f0-0baf670e2392-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.778975 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6d573bc3-94ca-449b-80f0-0baf670e2392-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:32 crc kubenswrapper[4753]: I1205 17:07:32.944264 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 17:07:32 crc kubenswrapper[4753]: W1205 17:07:32.954658 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poda02c0cda_e1a3_4d5e_a64c_0a232d89b057.slice/crio-2dee77b3d5bf1e74b6c7a5c8c98bef468334d3ae029a74761b73e24d96047d51 WatchSource:0}: Error finding container 2dee77b3d5bf1e74b6c7a5c8c98bef468334d3ae029a74761b73e24d96047d51: Status 404 returned error can't find the container with id 2dee77b3d5bf1e74b6c7a5c8c98bef468334d3ae029a74761b73e24d96047d51 Dec 05 17:07:33 crc kubenswrapper[4753]: I1205 17:07:33.231948 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a02c0cda-e1a3-4d5e-a64c-0a232d89b057","Type":"ContainerStarted","Data":"2dee77b3d5bf1e74b6c7a5c8c98bef468334d3ae029a74761b73e24d96047d51"} Dec 05 17:07:33 crc kubenswrapper[4753]: I1205 17:07:33.234625 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"6d573bc3-94ca-449b-80f0-0baf670e2392","Type":"ContainerDied","Data":"f3767233b9b4a18712e0743aa35f677f344db2944213d5a7e2a2084d80f48cfb"} Dec 05 17:07:33 crc kubenswrapper[4753]: I1205 17:07:33.234670 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3767233b9b4a18712e0743aa35f677f344db2944213d5a7e2a2084d80f48cfb" Dec 05 17:07:33 crc kubenswrapper[4753]: I1205 17:07:33.234762 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:07:34 crc kubenswrapper[4753]: I1205 17:07:34.243850 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a02c0cda-e1a3-4d5e-a64c-0a232d89b057","Type":"ContainerStarted","Data":"ea3568ffea086b577665daecf7670e6bb824487c18ef987c70c3163403e2d1f3"} Dec 05 17:07:34 crc kubenswrapper[4753]: I1205 17:07:34.268168 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.268127476 podStartE2EDuration="2.268127476s" podCreationTimestamp="2025-12-05 17:07:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:07:34.263739727 +0000 UTC m=+192.766846733" watchObservedRunningTime="2025-12-05 17:07:34.268127476 +0000 UTC m=+192.771234482" Dec 05 17:07:38 crc kubenswrapper[4753]: I1205 17:07:38.808314 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:07:38 crc kubenswrapper[4753]: I1205 17:07:38.809244 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:07:38 crc kubenswrapper[4753]: I1205 17:07:38.872917 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:07:39 crc kubenswrapper[4753]: I1205 17:07:39.289014 4753 generic.go:334] "Generic (PLEG): container finished" podID="5560048a-c9e9-4743-9573-1b58a2240c29" containerID="554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891" exitCode=0 Dec 05 17:07:39 crc kubenswrapper[4753]: I1205 17:07:39.289097 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgpzc" event={"ID":"5560048a-c9e9-4743-9573-1b58a2240c29","Type":"ContainerDied","Data":"554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891"} Dec 05 17:07:39 crc kubenswrapper[4753]: I1205 17:07:39.343956 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:07:40 crc kubenswrapper[4753]: I1205 17:07:40.112924 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t2qjv"] Dec 05 17:07:40 crc kubenswrapper[4753]: I1205 17:07:40.297269 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgpzc" event={"ID":"5560048a-c9e9-4743-9573-1b58a2240c29","Type":"ContainerStarted","Data":"f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914"} Dec 05 17:07:40 crc kubenswrapper[4753]: I1205 17:07:40.326127 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-dgpzc" podStartSLOduration=4.276639061 podStartE2EDuration="53.326109271s" podCreationTimestamp="2025-12-05 17:06:47 +0000 UTC" firstStartedPulling="2025-12-05 17:06:50.707453522 +0000 UTC m=+149.210560528" lastFinishedPulling="2025-12-05 17:07:39.756923732 +0000 UTC m=+198.260030738" observedRunningTime="2025-12-05 17:07:40.324605236 +0000 UTC m=+198.827712252" watchObservedRunningTime="2025-12-05 17:07:40.326109271 +0000 UTC m=+198.829216277" Dec 05 17:07:41 crc kubenswrapper[4753]: I1205 17:07:41.303534 4753 kuberuntime_container.go:808] "Killing container with 
a grace period" pod="openshift-marketplace/certified-operators-t2qjv" podUID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerName="registry-server" containerID="cri-o://1a2129e0def20a2aa85de9f9ebe1a3b6d4c2427793e37e03fca5c1a477759165" gracePeriod=2 Dec 05 17:07:41 crc kubenswrapper[4753]: I1205 17:07:41.457225 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:07:41 crc kubenswrapper[4753]: I1205 17:07:41.457307 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:07:41 crc kubenswrapper[4753]: I1205 17:07:41.498889 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.312525 4753 generic.go:334] "Generic (PLEG): container finished" podID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerID="1a2129e0def20a2aa85de9f9ebe1a3b6d4c2427793e37e03fca5c1a477759165" exitCode=0 Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.312669 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2qjv" event={"ID":"07c18cf7-d24d-46d2-90c1-0ef186b1c434","Type":"ContainerDied","Data":"1a2129e0def20a2aa85de9f9ebe1a3b6d4c2427793e37e03fca5c1a477759165"} Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.350283 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.659230 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.850467 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rb4kw\" (UniqueName: \"kubernetes.io/projected/07c18cf7-d24d-46d2-90c1-0ef186b1c434-kube-api-access-rb4kw\") pod \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.850511 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-catalog-content\") pod \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.850565 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-utilities\") pod \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\" (UID: \"07c18cf7-d24d-46d2-90c1-0ef186b1c434\") " Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.851541 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-utilities" (OuterVolumeSpecName: "utilities") pod "07c18cf7-d24d-46d2-90c1-0ef186b1c434" (UID: "07c18cf7-d24d-46d2-90c1-0ef186b1c434"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.862177 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07c18cf7-d24d-46d2-90c1-0ef186b1c434-kube-api-access-rb4kw" (OuterVolumeSpecName: "kube-api-access-rb4kw") pod "07c18cf7-d24d-46d2-90c1-0ef186b1c434" (UID: "07c18cf7-d24d-46d2-90c1-0ef186b1c434"). InnerVolumeSpecName "kube-api-access-rb4kw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.909989 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "07c18cf7-d24d-46d2-90c1-0ef186b1c434" (UID: "07c18cf7-d24d-46d2-90c1-0ef186b1c434"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.952937 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rb4kw\" (UniqueName: \"kubernetes.io/projected/07c18cf7-d24d-46d2-90c1-0ef186b1c434-kube-api-access-rb4kw\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.953718 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:42 crc kubenswrapper[4753]: I1205 17:07:42.953739 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/07c18cf7-d24d-46d2-90c1-0ef186b1c434-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:43 crc kubenswrapper[4753]: I1205 17:07:43.111747 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wrrx9"] Dec 05 17:07:43 crc kubenswrapper[4753]: I1205 17:07:43.319391 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2qjv" event={"ID":"07c18cf7-d24d-46d2-90c1-0ef186b1c434","Type":"ContainerDied","Data":"498064a4a81201cbb5ec54c37d4703ba1ae3303800eb2bf9a24f4d1222ac4d33"} Dec 05 17:07:43 crc kubenswrapper[4753]: I1205 17:07:43.319465 4753 scope.go:117] "RemoveContainer" containerID="1a2129e0def20a2aa85de9f9ebe1a3b6d4c2427793e37e03fca5c1a477759165" Dec 05 17:07:43 crc kubenswrapper[4753]: I1205 17:07:43.319509 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t2qjv" Dec 05 17:07:43 crc kubenswrapper[4753]: I1205 17:07:43.351327 4753 scope.go:117] "RemoveContainer" containerID="9f797544f347f56705c03d4fdff050d55310e5d32cee991eeb6106f8c715a883" Dec 05 17:07:43 crc kubenswrapper[4753]: I1205 17:07:43.357295 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t2qjv"] Dec 05 17:07:43 crc kubenswrapper[4753]: I1205 17:07:43.365231 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t2qjv"] Dec 05 17:07:43 crc kubenswrapper[4753]: I1205 17:07:43.384806 4753 scope.go:117] "RemoveContainer" containerID="d50b5204633464a4d571bd81e8c24206e87b72f9e82d515bb49df7e51bc371ba" Dec 05 17:07:43 crc kubenswrapper[4753]: E1205 17:07:43.418029 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod07c18cf7_d24d_46d2_90c1_0ef186b1c434.slice\": RecentStats: unable to find data in memory cache]" Dec 05 17:07:43 crc kubenswrapper[4753]: I1205 17:07:43.731442 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" path="/var/lib/kubelet/pods/07c18cf7-d24d-46d2-90c1-0ef186b1c434/volumes" Dec 05 17:07:44 crc kubenswrapper[4753]: I1205 17:07:44.327188 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wrrx9" podUID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerName="registry-server" containerID="cri-o://a5985d6864154ad6710fc7a09870cbd92c7165c1f76a8ca26a77ad8b000be849" gracePeriod=2 Dec 05 17:07:46 crc kubenswrapper[4753]: I1205 17:07:46.343210 4753 generic.go:334] "Generic (PLEG): container finished" podID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerID="a5985d6864154ad6710fc7a09870cbd92c7165c1f76a8ca26a77ad8b000be849" exitCode=0 Dec 05 17:07:46 crc kubenswrapper[4753]: I1205 17:07:46.343267 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrx9" event={"ID":"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b","Type":"ContainerDied","Data":"a5985d6864154ad6710fc7a09870cbd92c7165c1f76a8ca26a77ad8b000be849"} Dec 05 17:07:46 crc kubenswrapper[4753]: I1205 17:07:46.720475 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:07:46 crc kubenswrapper[4753]: I1205 17:07:46.915743 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxxbn\" (UniqueName: \"kubernetes.io/projected/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-kube-api-access-dxxbn\") pod \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " Dec 05 17:07:46 crc kubenswrapper[4753]: I1205 17:07:46.915862 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-utilities\") pod \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " Dec 05 17:07:46 crc kubenswrapper[4753]: I1205 17:07:46.915956 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-catalog-content\") pod \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\" (UID: \"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b\") " Dec 05 17:07:46 crc kubenswrapper[4753]: I1205 17:07:46.916937 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-utilities" (OuterVolumeSpecName: "utilities") pod "fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" (UID: "fd4c8ac7-365b-4f24-b44c-b1cbc91b339b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:07:46 crc kubenswrapper[4753]: I1205 17:07:46.923623 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-kube-api-access-dxxbn" (OuterVolumeSpecName: "kube-api-access-dxxbn") pod "fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" (UID: "fd4c8ac7-365b-4f24-b44c-b1cbc91b339b"). InnerVolumeSpecName "kube-api-access-dxxbn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.017766 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxxbn\" (UniqueName: \"kubernetes.io/projected/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-kube-api-access-dxxbn\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.017812 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.021525 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" (UID: "fd4c8ac7-365b-4f24-b44c-b1cbc91b339b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.119272 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.352371 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qt9vk" event={"ID":"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9","Type":"ContainerStarted","Data":"96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3"} Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.354383 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ggrp" event={"ID":"36b2ef01-2c7a-4313-a03a-be77a660d987","Type":"ContainerStarted","Data":"679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5"} Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.356547 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cb4ng" event={"ID":"db1dcb29-b751-4a77-8f2e-68efadf955b9","Type":"ContainerStarted","Data":"331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d"} Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.359809 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrx9" event={"ID":"fd4c8ac7-365b-4f24-b44c-b1cbc91b339b","Type":"ContainerDied","Data":"ac2f70dd6372231174dbde4c01ce8347ce0fa4e5f27f9bd2debff83880124a6f"} Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.359841 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wrrx9" Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.359860 4753 scope.go:117] "RemoveContainer" containerID="a5985d6864154ad6710fc7a09870cbd92c7165c1f76a8ca26a77ad8b000be849" Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.363651 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64x2r" event={"ID":"c1873906-73e5-4c22-b547-233eb99f1562","Type":"ContainerStarted","Data":"4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf"} Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.574270 4753 scope.go:117] "RemoveContainer" containerID="880d3b3533f1ad143c6a991403465225cc26e4a9c30a34883a1e8f7a990cf62e" Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.606093 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wrrx9"] Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.621255 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wrrx9"] Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.631256 4753 scope.go:117] "RemoveContainer" containerID="4e8ce317ab59521ce59449fe2ece6a7288ff88e5abb1c5505a22d9117f1d4873" Dec 05 17:07:47 crc kubenswrapper[4753]: I1205 17:07:47.729624 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" path="/var/lib/kubelet/pods/fd4c8ac7-365b-4f24-b44c-b1cbc91b339b/volumes" Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.371868 4753 generic.go:334] "Generic (PLEG): container finished" podID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerID="331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d" exitCode=0 Dec 05 17:07:48 crc 
kubenswrapper[4753]: I1205 17:07:48.371962 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cb4ng" event={"ID":"db1dcb29-b751-4a77-8f2e-68efadf955b9","Type":"ContainerDied","Data":"331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d"} Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.373766 4753 generic.go:334] "Generic (PLEG): container finished" podID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerID="679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5" exitCode=0 Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.373823 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ggrp" event={"ID":"36b2ef01-2c7a-4313-a03a-be77a660d987","Type":"ContainerDied","Data":"679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5"} Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.378557 4753 generic.go:334] "Generic (PLEG): container finished" podID="c1873906-73e5-4c22-b547-233eb99f1562" containerID="4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf" exitCode=0 Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.378629 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64x2r" event={"ID":"c1873906-73e5-4c22-b547-233eb99f1562","Type":"ContainerDied","Data":"4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf"} Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.384025 4753 generic.go:334] "Generic (PLEG): container finished" podID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerID="5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d" exitCode=0 Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.384169 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4kqm" event={"ID":"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b","Type":"ContainerDied","Data":"5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d"} Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.387326 4753 generic.go:334] "Generic (PLEG): container finished" podID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerID="96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3" exitCode=0 Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.387358 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qt9vk" event={"ID":"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9","Type":"ContainerDied","Data":"96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3"} Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.811142 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.811615 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:07:48 crc kubenswrapper[4753]: I1205 17:07:48.865420 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:07:49 crc kubenswrapper[4753]: I1205 17:07:49.435755 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:07:53 crc kubenswrapper[4753]: I1205 17:07:53.422582 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4kqm" 
event={"ID":"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b","Type":"ContainerStarted","Data":"1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7"} Dec 05 17:07:54 crc kubenswrapper[4753]: I1205 17:07:54.449085 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cb4ng" event={"ID":"db1dcb29-b751-4a77-8f2e-68efadf955b9","Type":"ContainerStarted","Data":"905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045"} Dec 05 17:07:54 crc kubenswrapper[4753]: I1205 17:07:54.459469 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qt9vk" event={"ID":"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9","Type":"ContainerStarted","Data":"03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be"} Dec 05 17:07:54 crc kubenswrapper[4753]: I1205 17:07:54.474832 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cb4ng" podStartSLOduration=3.856113907 podStartE2EDuration="1m7.474811759s" podCreationTimestamp="2025-12-05 17:06:47 +0000 UTC" firstStartedPulling="2025-12-05 17:06:50.651142541 +0000 UTC m=+149.154249547" lastFinishedPulling="2025-12-05 17:07:54.269840393 +0000 UTC m=+212.772947399" observedRunningTime="2025-12-05 17:07:54.468570275 +0000 UTC m=+212.971677281" watchObservedRunningTime="2025-12-05 17:07:54.474811759 +0000 UTC m=+212.977918765" Dec 05 17:07:54 crc kubenswrapper[4753]: I1205 17:07:54.511423 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-c4kqm" podStartSLOduration=3.818816413 podStartE2EDuration="1m4.511407641s" podCreationTimestamp="2025-12-05 17:06:50 +0000 UTC" firstStartedPulling="2025-12-05 17:06:51.750769946 +0000 UTC m=+150.253876952" lastFinishedPulling="2025-12-05 17:07:52.443361174 +0000 UTC m=+210.946468180" observedRunningTime="2025-12-05 17:07:54.490663187 +0000 UTC m=+212.993770193" watchObservedRunningTime="2025-12-05 17:07:54.511407641 +0000 UTC m=+213.014514647" Dec 05 17:07:55 crc kubenswrapper[4753]: I1205 17:07:55.469659 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ggrp" event={"ID":"36b2ef01-2c7a-4313-a03a-be77a660d987","Type":"ContainerStarted","Data":"1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4"} Dec 05 17:07:55 crc kubenswrapper[4753]: I1205 17:07:55.471974 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64x2r" event={"ID":"c1873906-73e5-4c22-b547-233eb99f1562","Type":"ContainerStarted","Data":"2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc"} Dec 05 17:07:55 crc kubenswrapper[4753]: I1205 17:07:55.494249 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6ggrp" podStartSLOduration=3.957825256 podStartE2EDuration="1m5.494229161s" podCreationTimestamp="2025-12-05 17:06:50 +0000 UTC" firstStartedPulling="2025-12-05 17:06:52.80526569 +0000 UTC m=+151.308372696" lastFinishedPulling="2025-12-05 17:07:54.341669595 +0000 UTC m=+212.844776601" observedRunningTime="2025-12-05 17:07:55.492069548 +0000 UTC m=+213.995176544" watchObservedRunningTime="2025-12-05 17:07:55.494229161 +0000 UTC m=+213.997336167" Dec 05 17:07:55 crc kubenswrapper[4753]: I1205 17:07:55.496226 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qt9vk" 
podStartSLOduration=3.932430196 podStartE2EDuration="1m7.49621908s" podCreationTimestamp="2025-12-05 17:06:48 +0000 UTC" firstStartedPulling="2025-12-05 17:06:50.733877521 +0000 UTC m=+149.236984527" lastFinishedPulling="2025-12-05 17:07:54.297666405 +0000 UTC m=+212.800773411" observedRunningTime="2025-12-05 17:07:54.511848474 +0000 UTC m=+213.014955480" watchObservedRunningTime="2025-12-05 17:07:55.49621908 +0000 UTC m=+213.999326086" Dec 05 17:07:55 crc kubenswrapper[4753]: I1205 17:07:55.519459 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-64x2r" podStartSLOduration=2.924635492 podStartE2EDuration="1m5.519429336s" podCreationTimestamp="2025-12-05 17:06:50 +0000 UTC" firstStartedPulling="2025-12-05 17:06:51.750109607 +0000 UTC m=+150.253216613" lastFinishedPulling="2025-12-05 17:07:54.344903431 +0000 UTC m=+212.848010457" observedRunningTime="2025-12-05 17:07:55.514670815 +0000 UTC m=+214.017777821" watchObservedRunningTime="2025-12-05 17:07:55.519429336 +0000 UTC m=+214.022536342" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.315406 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.315476 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.364294 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.640623 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.640704 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.696092 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.979964 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.980062 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.980129 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.980937 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container 
machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:07:58 crc kubenswrapper[4753]: I1205 17:07:58.981078 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80" gracePeriod=600 Dec 05 17:07:59 crc kubenswrapper[4753]: I1205 17:07:59.549774 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:07:59 crc kubenswrapper[4753]: I1205 17:07:59.912837 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qt9vk"] Dec 05 17:08:00 crc kubenswrapper[4753]: I1205 17:08:00.455455 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:08:00 crc kubenswrapper[4753]: I1205 17:08:00.455975 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:08:00 crc kubenswrapper[4753]: I1205 17:08:00.515640 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:08:00 crc kubenswrapper[4753]: I1205 17:08:00.520939 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80" exitCode=0 Dec 05 17:08:00 crc kubenswrapper[4753]: I1205 17:08:00.521121 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80"} Dec 05 17:08:00 crc kubenswrapper[4753]: I1205 17:08:00.582065 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:08:00 crc kubenswrapper[4753]: I1205 17:08:00.863725 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:08:00 crc kubenswrapper[4753]: I1205 17:08:00.864190 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:08:00 crc kubenswrapper[4753]: I1205 17:08:00.918370 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:08:01 crc kubenswrapper[4753]: I1205 17:08:01.245811 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:08:01 crc kubenswrapper[4753]: I1205 17:08:01.245913 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:08:01 crc kubenswrapper[4753]: I1205 17:08:01.288989 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:08:01 crc kubenswrapper[4753]: I1205 17:08:01.529592 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" 
event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"b75942e9b07cd0facb3635e22b2009b01e5b0d5b508c9825ab0bd49c3ba268e9"} Dec 05 17:08:01 crc kubenswrapper[4753]: I1205 17:08:01.530464 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qt9vk" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerName="registry-server" containerID="cri-o://03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be" gracePeriod=2 Dec 05 17:08:01 crc kubenswrapper[4753]: I1205 17:08:01.579205 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:08:01 crc kubenswrapper[4753]: I1205 17:08:01.584545 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:08:01 crc kubenswrapper[4753]: I1205 17:08:01.922334 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.058882 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5zgt\" (UniqueName: \"kubernetes.io/projected/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-kube-api-access-g5zgt\") pod \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.058943 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-catalog-content\") pod \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.059045 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-utilities\") pod \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\" (UID: \"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9\") " Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.060403 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-utilities" (OuterVolumeSpecName: "utilities") pod "9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" (UID: "9a495df2-eeed-4f2a-b492-cf5aaaaab8f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.067736 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-kube-api-access-g5zgt" (OuterVolumeSpecName: "kube-api-access-g5zgt") pod "9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" (UID: "9a495df2-eeed-4f2a-b492-cf5aaaaab8f9"). InnerVolumeSpecName "kube-api-access-g5zgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.116242 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" (UID: "9a495df2-eeed-4f2a-b492-cf5aaaaab8f9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.160963 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.161002 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.161012 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5zgt\" (UniqueName: \"kubernetes.io/projected/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9-kube-api-access-g5zgt\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.541048 4753 generic.go:334] "Generic (PLEG): container finished" podID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerID="03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be" exitCode=0 Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.541223 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qt9vk" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.541313 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qt9vk" event={"ID":"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9","Type":"ContainerDied","Data":"03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be"} Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.541375 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qt9vk" event={"ID":"9a495df2-eeed-4f2a-b492-cf5aaaaab8f9","Type":"ContainerDied","Data":"0ad1763cb3f495581aaf2e6969b87aebbb92d87bf8d65c4a989126bd4c81479c"} Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.541415 4753 scope.go:117] "RemoveContainer" containerID="03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.572117 4753 scope.go:117] "RemoveContainer" containerID="96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.592528 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qt9vk"] Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.598242 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qt9vk"] Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.603022 4753 scope.go:117] "RemoveContainer" containerID="29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.624874 4753 scope.go:117] "RemoveContainer" containerID="03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be" Dec 05 17:08:02 crc kubenswrapper[4753]: E1205 17:08:02.625609 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be\": container with ID starting with 03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be not found: ID does not exist" containerID="03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.625670 
4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be"} err="failed to get container status \"03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be\": rpc error: code = NotFound desc = could not find container \"03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be\": container with ID starting with 03d07d3c50f6fed74c5b65bc90024353271f16d11f313d01fa1cbefcc2aeb7be not found: ID does not exist" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.625706 4753 scope.go:117] "RemoveContainer" containerID="96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3" Dec 05 17:08:02 crc kubenswrapper[4753]: E1205 17:08:02.626282 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3\": container with ID starting with 96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3 not found: ID does not exist" containerID="96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.626322 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3"} err="failed to get container status \"96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3\": rpc error: code = NotFound desc = could not find container \"96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3\": container with ID starting with 96a5a9eed098554f23948dee2d3db1b454beaafcf841d7ac421b53e7479917a3 not found: ID does not exist" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.626356 4753 scope.go:117] "RemoveContainer" containerID="29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54" Dec 05 17:08:02 crc kubenswrapper[4753]: E1205 17:08:02.626736 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54\": container with ID starting with 29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54 not found: ID does not exist" containerID="29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.626797 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54"} err="failed to get container status \"29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54\": rpc error: code = NotFound desc = could not find container \"29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54\": container with ID starting with 29ff8ebe6b38250b5aecad785cb1281f6b97a105c721d7fd15e1706af717ea54 not found: ID does not exist" Dec 05 17:08:02 crc kubenswrapper[4753]: I1205 17:08:02.914689 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-64x2r"] Dec 05 17:08:03 crc kubenswrapper[4753]: I1205 17:08:03.562542 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-64x2r" podUID="c1873906-73e5-4c22-b547-233eb99f1562" containerName="registry-server" containerID="cri-o://2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc" gracePeriod=2 Dec 05 
17:08:03 crc kubenswrapper[4753]: E1205 17:08:03.700254 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc1873906_73e5_4c22_b547_233eb99f1562.slice/crio-2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:08:03 crc kubenswrapper[4753]: I1205 17:08:03.733337 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" path="/var/lib/kubelet/pods/9a495df2-eeed-4f2a-b492-cf5aaaaab8f9/volumes" Dec 05 17:08:03 crc kubenswrapper[4753]: I1205 17:08:03.960585 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.089786 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqw9m\" (UniqueName: \"kubernetes.io/projected/c1873906-73e5-4c22-b547-233eb99f1562-kube-api-access-wqw9m\") pod \"c1873906-73e5-4c22-b547-233eb99f1562\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.089994 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-catalog-content\") pod \"c1873906-73e5-4c22-b547-233eb99f1562\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.090070 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-utilities\") pod \"c1873906-73e5-4c22-b547-233eb99f1562\" (UID: \"c1873906-73e5-4c22-b547-233eb99f1562\") " Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.091084 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-utilities" (OuterVolumeSpecName: "utilities") pod "c1873906-73e5-4c22-b547-233eb99f1562" (UID: "c1873906-73e5-4c22-b547-233eb99f1562"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.098241 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1873906-73e5-4c22-b547-233eb99f1562-kube-api-access-wqw9m" (OuterVolumeSpecName: "kube-api-access-wqw9m") pod "c1873906-73e5-4c22-b547-233eb99f1562" (UID: "c1873906-73e5-4c22-b547-233eb99f1562"). InnerVolumeSpecName "kube-api-access-wqw9m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.123553 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1873906-73e5-4c22-b547-233eb99f1562" (UID: "c1873906-73e5-4c22-b547-233eb99f1562"). InnerVolumeSpecName "catalog-content". 
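
[analysis note] The repeated "ContainerStatus from runtime service failed ... NotFound ... ID does not exist" errors above are a benign race: the kubelet asks CRI-O for the status of a container it has just removed, and the runtime answers with gRPC NotFound, so the follow-up "DeleteContainer returned error" lines are expected noise rather than failures. Callers typically treat NotFound as "already gone". A small sketch of that idempotent-removal pattern with the grpc status package; the helper name is made up for illustration.

    package main

    import (
        "errors"
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // ignoreNotFound (hypothetical helper) maps gRPC NotFound to success,
    // so a remove-then-verify sequence tolerates the container already being gone.
    func ignoreNotFound(err error) error {
        if status.Code(err) == codes.NotFound {
            return nil
        }
        return err
    }

    func main() {
        gone := status.Error(codes.NotFound, "could not find container: ID does not exist")
        fmt.Println(ignoreNotFound(gone))                  // <nil>: treated as already removed
        fmt.Println(ignoreNotFound(errors.New("timeout"))) // real failures still surface
    }
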
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.191277 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.191793 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqw9m\" (UniqueName: \"kubernetes.io/projected/c1873906-73e5-4c22-b547-233eb99f1562-kube-api-access-wqw9m\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.191808 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1873906-73e5-4c22-b547-233eb99f1562-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.571419 4753 generic.go:334] "Generic (PLEG): container finished" podID="c1873906-73e5-4c22-b547-233eb99f1562" containerID="2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc" exitCode=0 Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.571465 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64x2r" event={"ID":"c1873906-73e5-4c22-b547-233eb99f1562","Type":"ContainerDied","Data":"2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc"} Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.571535 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-64x2r" event={"ID":"c1873906-73e5-4c22-b547-233eb99f1562","Type":"ContainerDied","Data":"dc75d39c3110787140772965b42a2a67699a11602526d4283c1bbe2aef09ee5a"} Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.571564 4753 scope.go:117] "RemoveContainer" containerID="2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.571965 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-64x2r" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.595841 4753 scope.go:117] "RemoveContainer" containerID="4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.607163 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-64x2r"] Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.610405 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-64x2r"] Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.639481 4753 scope.go:117] "RemoveContainer" containerID="8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.658268 4753 scope.go:117] "RemoveContainer" containerID="2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc" Dec 05 17:08:04 crc kubenswrapper[4753]: E1205 17:08:04.658962 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc\": container with ID starting with 2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc not found: ID does not exist" containerID="2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.659041 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc"} err="failed to get container status \"2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc\": rpc error: code = NotFound desc = could not find container \"2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc\": container with ID starting with 2bd83132b01fe603fbcde8033c6f644d65226b42b6a5de83b59bcd9a6ef63abc not found: ID does not exist" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.659082 4753 scope.go:117] "RemoveContainer" containerID="4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf" Dec 05 17:08:04 crc kubenswrapper[4753]: E1205 17:08:04.659795 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf\": container with ID starting with 4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf not found: ID does not exist" containerID="4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.659839 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf"} err="failed to get container status \"4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf\": rpc error: code = NotFound desc = could not find container \"4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf\": container with ID starting with 4ef94ad49859273b39e13a547fb7850c223a0ce3d6f0b96e454c7d07d0ba7faf not found: ID does not exist" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.659872 4753 scope.go:117] "RemoveContainer" containerID="8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5" Dec 05 17:08:04 crc kubenswrapper[4753]: E1205 17:08:04.660531 4753 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5\": container with ID starting with 8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5 not found: ID does not exist" containerID="8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5" Dec 05 17:08:04 crc kubenswrapper[4753]: I1205 17:08:04.660605 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5"} err="failed to get container status \"8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5\": rpc error: code = NotFound desc = could not find container \"8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5\": container with ID starting with 8c07b28b001da477d5287627a69ec33a1a86d852b40b0af791d4da25d940d6e5 not found: ID does not exist" Dec 05 17:08:05 crc kubenswrapper[4753]: I1205 17:08:05.729031 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1873906-73e5-4c22-b547-233eb99f1562" path="/var/lib/kubelet/pods/c1873906-73e5-4c22-b547-233eb99f1562/volumes" Dec 05 17:08:08 crc kubenswrapper[4753]: I1205 17:08:08.370482 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:08:09 crc kubenswrapper[4753]: I1205 17:08:09.594253 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kzdfh"] Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.168425 4753 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169459 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerName="extract-utilities" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169485 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerName="extract-utilities" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169515 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1873906-73e5-4c22-b547-233eb99f1562" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169526 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1873906-73e5-4c22-b547-233eb99f1562" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169543 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169556 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169569 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1873906-73e5-4c22-b547-233eb99f1562" containerName="extract-utilities" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169580 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1873906-73e5-4c22-b547-233eb99f1562" containerName="extract-utilities" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169590 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerName="registry-server" Dec 05 
17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169600 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169612 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d573bc3-94ca-449b-80f0-0baf670e2392" containerName="pruner" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169620 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d573bc3-94ca-449b-80f0-0baf670e2392" containerName="pruner" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169638 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerName="extract-utilities" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169648 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerName="extract-utilities" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169658 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerName="extract-utilities" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169667 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerName="extract-utilities" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169677 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerName="extract-content" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169687 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerName="extract-content" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169697 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1873906-73e5-4c22-b547-233eb99f1562" containerName="extract-content" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169706 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1873906-73e5-4c22-b547-233eb99f1562" containerName="extract-content" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169719 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169727 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169743 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerName="extract-content" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169752 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerName="extract-content" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.169764 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerName="extract-content" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169773 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerName="extract-content" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169920 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1873906-73e5-4c22-b547-233eb99f1562" containerName="registry-server" 
Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169951 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a495df2-eeed-4f2a-b492-cf5aaaaab8f9" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169962 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="07c18cf7-d24d-46d2-90c1-0ef186b1c434" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169979 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd4c8ac7-365b-4f24-b44c-b1cbc91b339b" containerName="registry-server" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.169992 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d573bc3-94ca-449b-80f0-0baf670e2392" containerName="pruner" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.170705 4753 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.170746 4753 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.170875 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.171263 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce" gracePeriod=15 Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.171399 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416" gracePeriod=15 Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.171465 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b" gracePeriod=15 Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.171428 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78" gracePeriod=15 Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.171389 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907" gracePeriod=15 Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.173130 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173183 4753 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.173197 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173206 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.173227 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173235 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.173248 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173255 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.173267 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173275 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.173287 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173295 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 17:08:11 crc kubenswrapper[4753]: E1205 17:08:11.173304 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173312 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173464 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173478 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173499 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173511 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173520 4753 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.173533 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.181771 4753 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.298442 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.298509 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.298555 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.298582 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.298616 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.298648 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.298666 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.298718 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400056 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400131 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400192 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400211 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400216 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400261 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400289 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400287 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400324 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400302 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400333 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400350 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400485 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400561 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400592 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.400696 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.631065 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.632959 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.633811 4753 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78" exitCode=0 Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.633853 4753 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907" exitCode=0 Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.633865 4753 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416" exitCode=0 Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.633879 4753 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b" exitCode=2 Dec 05 17:08:11 crc kubenswrapper[4753]: I1205 17:08:11.633926 4753 scope.go:117] "RemoveContainer" containerID="0fea878c0275a3934d26327c13cf5a93ae3b881132788f3d031af5b382bbdd02" Dec 05 17:08:12 crc kubenswrapper[4753]: I1205 17:08:12.646927 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.657696 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.659938 4753 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce" exitCode=0 Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.660058 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f2b82c9b0984822d4653728ea247513afddd788e49b4b3c1fee17eda014310a9" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.698579 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.699927 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.700889 4753 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.843903 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.845104 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.844116 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.845140 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.845672 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.845753 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.846891 4753 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.847043 4753 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:13 crc kubenswrapper[4753]: I1205 17:08:13.847183 4753 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:14 crc kubenswrapper[4753]: I1205 17:08:14.668065 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:14 crc kubenswrapper[4753]: I1205 17:08:14.669811 4753 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:14 crc kubenswrapper[4753]: I1205 17:08:14.682014 4753 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:15 crc kubenswrapper[4753]: I1205 17:08:15.730892 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 17:08:16 crc kubenswrapper[4753]: E1205 17:08:16.216798 4753 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:16 crc kubenswrapper[4753]: I1205 17:08:16.218125 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:16 crc kubenswrapper[4753]: W1205 17:08:16.256779 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-13b2a82fcc04ff8b71962397ad25c62589497e767d7fd65a965825ced37c2f47 WatchSource:0}: Error finding container 13b2a82fcc04ff8b71962397ad25c62589497e767d7fd65a965825ced37c2f47: Status 404 returned error can't find the container with id 13b2a82fcc04ff8b71962397ad25c62589497e767d7fd65a965825ced37c2f47 Dec 05 17:08:16 crc kubenswrapper[4753]: E1205 17:08:16.261925 4753 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e60c3625f00c8 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 17:08:16.261095624 +0000 UTC m=+234.764202650,LastTimestamp:2025-12-05 17:08:16.261095624 +0000 UTC m=+234.764202650,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 17:08:16 crc kubenswrapper[4753]: I1205 17:08:16.681939 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f"} Dec 05 17:08:16 crc kubenswrapper[4753]: I1205 17:08:16.682000 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"13b2a82fcc04ff8b71962397ad25c62589497e767d7fd65a965825ced37c2f47"} Dec 05 17:08:16 crc kubenswrapper[4753]: E1205 17:08:16.682584 4753 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:08:16 crc kubenswrapper[4753]: I1205 17:08:16.684247 4753 generic.go:334] "Generic (PLEG): container finished" podID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" containerID="ea3568ffea086b577665daecf7670e6bb824487c18ef987c70c3163403e2d1f3" exitCode=0 Dec 05 17:08:16 crc kubenswrapper[4753]: I1205 17:08:16.684350 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a02c0cda-e1a3-4d5e-a64c-0a232d89b057","Type":"ContainerDied","Data":"ea3568ffea086b577665daecf7670e6bb824487c18ef987c70c3163403e2d1f3"} Dec 05 17:08:16 crc kubenswrapper[4753]: I1205 17:08:16.685221 4753 status_manager.go:851] "Failed to get status for pod" 
podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:16 crc kubenswrapper[4753]: E1205 17:08:16.901146 4753 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:16 crc kubenswrapper[4753]: E1205 17:08:16.901465 4753 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:16 crc kubenswrapper[4753]: E1205 17:08:16.901692 4753 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:16 crc kubenswrapper[4753]: E1205 17:08:16.901884 4753 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:16 crc kubenswrapper[4753]: E1205 17:08:16.902099 4753 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:16 crc kubenswrapper[4753]: I1205 17:08:16.902118 4753 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 17:08:16 crc kubenswrapper[4753]: E1205 17:08:16.902522 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="200ms" Dec 05 17:08:17 crc kubenswrapper[4753]: E1205 17:08:17.103879 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="400ms" Dec 05 17:08:17 crc kubenswrapper[4753]: E1205 17:08:17.505384 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="800ms" Dec 05 17:08:17 crc kubenswrapper[4753]: I1205 17:08:17.953339 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:08:17 crc kubenswrapper[4753]: I1205 17:08:17.954518 4753 status_manager.go:851] "Failed to get status for pod" podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.115271 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-var-lock\") pod \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.115351 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-var-lock" (OuterVolumeSpecName: "var-lock") pod "a02c0cda-e1a3-4d5e-a64c-0a232d89b057" (UID: "a02c0cda-e1a3-4d5e-a64c-0a232d89b057"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.115401 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kubelet-dir\") pod \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.115545 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a02c0cda-e1a3-4d5e-a64c-0a232d89b057" (UID: "a02c0cda-e1a3-4d5e-a64c-0a232d89b057"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.115586 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kube-api-access\") pod \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\" (UID: \"a02c0cda-e1a3-4d5e-a64c-0a232d89b057\") " Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.116160 4753 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.116237 4753 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.126320 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a02c0cda-e1a3-4d5e-a64c-0a232d89b057" (UID: "a02c0cda-e1a3-4d5e-a64c-0a232d89b057"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.218318 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a02c0cda-e1a3-4d5e-a64c-0a232d89b057-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:08:18 crc kubenswrapper[4753]: E1205 17:08:18.307389 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="1.6s" Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.705928 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a02c0cda-e1a3-4d5e-a64c-0a232d89b057","Type":"ContainerDied","Data":"2dee77b3d5bf1e74b6c7a5c8c98bef468334d3ae029a74761b73e24d96047d51"} Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.706010 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dee77b3d5bf1e74b6c7a5c8c98bef468334d3ae029a74761b73e24d96047d51" Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.706122 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:08:18 crc kubenswrapper[4753]: I1205 17:08:18.733587 4753 status_manager.go:851] "Failed to get status for pod" podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:19 crc kubenswrapper[4753]: E1205 17:08:19.618141 4753 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.233:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e60c3625f00c8 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 17:08:16.261095624 +0000 UTC m=+234.764202650,LastTimestamp:2025-12-05 17:08:16.261095624 +0000 UTC m=+234.764202650,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 17:08:19 crc kubenswrapper[4753]: E1205 17:08:19.908845 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="3.2s" Dec 05 17:08:20 crc kubenswrapper[4753]: E1205 17:08:20.757582 4753 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API 
server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" volumeName="registry-storage" Dec 05 17:08:21 crc kubenswrapper[4753]: I1205 17:08:21.735548 4753 status_manager.go:851] "Failed to get status for pod" podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:23 crc kubenswrapper[4753]: E1205 17:08:23.110211 4753 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.233:6443: connect: connection refused" interval="6.4s" Dec 05 17:08:24 crc kubenswrapper[4753]: I1205 17:08:24.719961 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:24 crc kubenswrapper[4753]: I1205 17:08:24.722820 4753 status_manager.go:851] "Failed to get status for pod" podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:24 crc kubenswrapper[4753]: I1205 17:08:24.745549 4753 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e" Dec 05 17:08:24 crc kubenswrapper[4753]: I1205 17:08:24.745606 4753 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e" Dec 05 17:08:24 crc kubenswrapper[4753]: E1205 17:08:24.746472 4753 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:24 crc kubenswrapper[4753]: I1205 17:08:24.747273 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.778563 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.779117 4753 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d" exitCode=1 Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.779207 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d"} Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.780500 4753 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.780665 4753 scope.go:117] "RemoveContainer" containerID="0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d" Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.780786 4753 status_manager.go:851] "Failed to get status for pod" podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.781590 4753 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="e526fc80a1c90ff318aa4f78faa1f3cd2e7cf28362260a757470828dc350e47d" exitCode=0 Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.781637 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"e526fc80a1c90ff318aa4f78faa1f3cd2e7cf28362260a757470828dc350e47d"} Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.781679 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"6ab2e31683848334b20945ac2557e112c703f6bb1f08d185a93f264ab45adefb"} Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.782006 4753 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e" Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.782027 4753 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e" Dec 05 17:08:25 crc kubenswrapper[4753]: E1205 17:08:25.782448 4753 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:25 
crc kubenswrapper[4753]: I1205 17:08:25.782961 4753 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:25 crc kubenswrapper[4753]: I1205 17:08:25.783252 4753 status_manager.go:851] "Failed to get status for pod" podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.233:6443: connect: connection refused" Dec 05 17:08:26 crc kubenswrapper[4753]: I1205 17:08:26.792280 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 17:08:26 crc kubenswrapper[4753]: I1205 17:08:26.793164 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b888da62eca45b4de4ea434f4382414730b7b2c92618756657b8158f45303700"} Dec 05 17:08:26 crc kubenswrapper[4753]: I1205 17:08:26.797916 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"56a65ebb26447437b8f03838000b264ce70be8b274de8691e208a0a2fab25d46"} Dec 05 17:08:26 crc kubenswrapper[4753]: I1205 17:08:26.797975 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"1303d2a60e6018e30f505b5b39a311eb8aebc127bfa85dae433fd4871915cc57"} Dec 05 17:08:26 crc kubenswrapper[4753]: I1205 17:08:26.797994 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a1039bb51a31e3ab5615a893ee46c1f3a341827a484de94a3568388a6782b167"} Dec 05 17:08:26 crc kubenswrapper[4753]: I1205 17:08:26.798009 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"779e25a3ac95556d1c92874b9af75505ac2541f4a596902bcfd447bc4e8c3015"} Dec 05 17:08:27 crc kubenswrapper[4753]: I1205 17:08:27.840867 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"071b2b6bdcac3f0134807783a9efde4f8377cf141754d04c275595a1faf453d1"} Dec 05 17:08:27 crc kubenswrapper[4753]: I1205 17:08:27.841408 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:08:27 crc kubenswrapper[4753]: I1205 17:08:27.841248 4753 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e" Dec 05 17:08:27 crc kubenswrapper[4753]: I1205 17:08:27.841443 4753 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e" 
Dec 05 17:08:29 crc kubenswrapper[4753]: I1205 17:08:29.748173 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:08:29 crc kubenswrapper[4753]: I1205 17:08:29.749128 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:08:29 crc kubenswrapper[4753]: I1205 17:08:29.756753 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:08:32 crc kubenswrapper[4753]: I1205 17:08:32.856477 4753 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:08:33 crc kubenswrapper[4753]: I1205 17:08:33.766538 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:08:33 crc kubenswrapper[4753]: I1205 17:08:33.880122 4753 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e"
Dec 05 17:08:33 crc kubenswrapper[4753]: I1205 17:08:33.880192 4753 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e"
Dec 05 17:08:33 crc kubenswrapper[4753]: I1205 17:08:33.886743 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:08:33 crc kubenswrapper[4753]: I1205 17:08:33.894586 4753 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="e0ef1591-4705-4161-bdaa-be0f8f787819"
Dec 05 17:08:34 crc kubenswrapper[4753]: I1205 17:08:34.626755 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" podUID="c3117f4b-6f3a-4131-9001-d39222e6f268" containerName="oauth-openshift" containerID="cri-o://27d65245dab10d3567db7e681dd5e984b902ce2f2fdbb9a740d47b5267a2294c" gracePeriod=15
Dec 05 17:08:34 crc kubenswrapper[4753]: I1205 17:08:34.886044 4753 generic.go:334] "Generic (PLEG): container finished" podID="c3117f4b-6f3a-4131-9001-d39222e6f268" containerID="27d65245dab10d3567db7e681dd5e984b902ce2f2fdbb9a740d47b5267a2294c" exitCode=0
Dec 05 17:08:34 crc kubenswrapper[4753]: I1205 17:08:34.886806 4753 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e"
Dec 05 17:08:34 crc kubenswrapper[4753]: I1205 17:08:34.886820 4753 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ee37fdbe-64d5-4bb1-8522-932d83e0648e"
Dec 05 17:08:34 crc kubenswrapper[4753]: I1205 17:08:34.886134 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" event={"ID":"c3117f4b-6f3a-4131-9001-d39222e6f268","Type":"ContainerDied","Data":"27d65245dab10d3567db7e681dd5e984b902ce2f2fdbb9a740d47b5267a2294c"}
Dec 05 17:08:34 crc kubenswrapper[4753]: I1205 17:08:34.976575 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:08:34 crc kubenswrapper[4753]: I1205 17:08:34.976772 4753 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 05 17:08:34 crc kubenswrapper[4753]: I1205 17:08:34.977936 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.034498 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.098763 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-trusted-ca-bundle\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.098811 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-cliconfig\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.098837 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-serving-cert\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.098897 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-service-ca\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.098930 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2kb8\" (UniqueName: \"kubernetes.io/projected/c3117f4b-6f3a-4131-9001-d39222e6f268-kube-api-access-k2kb8\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.098975 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-ocp-branding-template\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.099007 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-idp-0-file-data\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.099039 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-login\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.099058 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-error\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.099106 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-router-certs\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.099133 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-provider-selection\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.099197 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-session\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.099230 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-policies\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.099292 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-dir\") pod \"c3117f4b-6f3a-4131-9001-d39222e6f268\" (UID: \"c3117f4b-6f3a-4131-9001-d39222e6f268\") "
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.099662 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.101262 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.101287 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.101936 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.102094 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.107297 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.107520 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.107621 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3117f4b-6f3a-4131-9001-d39222e6f268-kube-api-access-k2kb8" (OuterVolumeSpecName: "kube-api-access-k2kb8") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "kube-api-access-k2kb8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.107808 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.108448 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.108664 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.108856 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.108975 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.114819 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "c3117f4b-6f3a-4131-9001-d39222e6f268" (UID: "c3117f4b-6f3a-4131-9001-d39222e6f268"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200456 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200513 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200524 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200536 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200546 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2kb8\" (UniqueName: \"kubernetes.io/projected/c3117f4b-6f3a-4131-9001-d39222e6f268-kube-api-access-k2kb8\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200558 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200570 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200581 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200591 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200602 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200616 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200630 4753 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/c3117f4b-6f3a-4131-9001-d39222e6f268-v4-0-config-system-session\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200642 4753 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-policies\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.200653 4753 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/c3117f4b-6f3a-4131-9001-d39222e6f268-audit-dir\") on node \"crc\" DevicePath \"\""
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.896310 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh" event={"ID":"c3117f4b-6f3a-4131-9001-d39222e6f268","Type":"ContainerDied","Data":"89cda6ec6470579314e60d6ab523d19e143eada5e55a3d46fab7c9ba48883d40"}
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.896366 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-kzdfh"
Dec 05 17:08:35 crc kubenswrapper[4753]: I1205 17:08:35.896368 4753 scope.go:117] "RemoveContainer" containerID="27d65245dab10d3567db7e681dd5e984b902ce2f2fdbb9a740d47b5267a2294c"
Dec 05 17:08:41 crc kubenswrapper[4753]: I1205 17:08:41.733726 4753 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="e0ef1591-4705-4161-bdaa-be0f8f787819"
Dec 05 17:08:42 crc kubenswrapper[4753]: I1205 17:08:42.081272 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 05 17:08:42 crc kubenswrapper[4753]: I1205 17:08:42.426037 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 05 17:08:42 crc kubenswrapper[4753]: I1205 17:08:42.471652 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 17:08:43 crc kubenswrapper[4753]: I1205 17:08:43.053437 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Dec 05 17:08:43 crc kubenswrapper[4753]: I1205 17:08:43.122455 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 05 17:08:43 crc kubenswrapper[4753]: I1205 17:08:43.330083 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 05 17:08:43 crc kubenswrapper[4753]: I1205 17:08:43.403854 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 17:08:43 crc kubenswrapper[4753]: I1205 17:08:43.765055 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 05 17:08:43 crc kubenswrapper[4753]: I1205 17:08:43.798379 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 05 17:08:43 crc kubenswrapper[4753]: I1205 17:08:43.892328 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.047895 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.255575 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.316198 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.512791 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.690512 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.729693 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.744449 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.813635 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.977531 4753 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 05 17:08:44 crc kubenswrapper[4753]: I1205 17:08:44.977635 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.140566 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.199745 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.385634 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.402280 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.463037 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.525964 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.559906 4753 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.634428 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.877948 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.891024 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.944858 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 05 17:08:45 crc kubenswrapper[4753]: I1205 17:08:45.980575 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.164185 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.266473 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.267093 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.409706 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.426368 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.465814 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.489490 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.529236 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.533637 4753 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.560880 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.619768 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.676228 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.909038 4753 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.922455 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-kzdfh","openshift-kube-apiserver/kube-apiserver-crc"]
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.922573 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.927687 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:08:46 crc kubenswrapper[4753]: I1205 17:08:46.940964 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=14.940937351 podStartE2EDuration="14.940937351s" podCreationTimestamp="2025-12-05 17:08:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:08:46.940681473 +0000 UTC m=+265.443788479" watchObservedRunningTime="2025-12-05 17:08:46.940937351 +0000 UTC m=+265.444044357"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.028536 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.169327 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.312946 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.423473 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.566576 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.575042 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.647866 4753 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.701005 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.727266 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3117f4b-6f3a-4131-9001-d39222e6f268" path="/var/lib/kubelet/pods/c3117f4b-6f3a-4131-9001-d39222e6f268/volumes"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.749929 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.801090 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.932063 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 05 17:08:47 crc kubenswrapper[4753]: I1205 17:08:47.949946 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.031391 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.032382 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.208762 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.248110 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.267310 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.301367 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.384168 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.489632 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.692018 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.779602 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.825729 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.851573 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.869316 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.892062 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 17:08:48 crc kubenswrapper[4753]: I1205 17:08:48.893480 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.027070 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.078808 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.110115 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.153687 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.178779 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.291121 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.450375 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.496726 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.547378 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.551650 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.588059 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.605098 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.625870 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.628635 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.743180 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.753910 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.757516 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.785424 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.861003 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.868054 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 05 17:08:49 crc kubenswrapper[4753]: I1205 17:08:49.909345 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.041682 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.095517 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.096447 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.152140 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.186741 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.186829 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.191790 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.196609 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.221061 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.266101 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.318258 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.395127 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.397203 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.489106 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.519800 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.557499 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.567313 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.642501 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.679987 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.701650 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.751107 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 05 17:08:50 crc kubenswrapper[4753]: I1205 17:08:50.805060 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.018955 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.025118 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.033449 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.093266 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.158557 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.208507 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.225367 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.293624 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.310055 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.425671 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.554479 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.579517 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.685351 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.711624 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.721977 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.738003 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.900621 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.985658 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 05 17:08:51 crc kubenswrapper[4753]: I1205 17:08:51.985688 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.120006 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.253044 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.301329 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.405925 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.407729 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.498689 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.587419 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.617906 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.700081 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.741921 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.769454 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.824252 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.856247 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.869686 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.923726 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.924357 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 05 17:08:52 crc kubenswrapper[4753]: I1205 17:08:52.927087 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.011732 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.160298 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.194772 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.313036 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.316535 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7986fd6655-kqdsv"]
Dec 05 17:08:53 crc kubenswrapper[4753]: E1205 17:08:53.316848 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3117f4b-6f3a-4131-9001-d39222e6f268" containerName="oauth-openshift"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.316874 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3117f4b-6f3a-4131-9001-d39222e6f268" containerName="oauth-openshift"
Dec 05 17:08:53 crc kubenswrapper[4753]: E1205 17:08:53.316900 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" containerName="installer"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.316913 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" containerName="installer"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.317043 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="a02c0cda-e1a3-4d5e-a64c-0a232d89b057" containerName="installer"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.317060 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3117f4b-6f3a-4131-9001-d39222e6f268" containerName="oauth-openshift"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.317638 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.320716 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.321234 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.321260 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.321390 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.321414 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.322052 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.322361 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.322628 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.322665 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.323028 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.323562 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.324742 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.332874 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.333735 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.368096 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.481406 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/11b94a29-7460-48ad-ab3d-75e200eb6094-audit-dir\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.481486 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-session\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.481806 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.481930 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-service-ca\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482058 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482101 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-template-login\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482185 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482332 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmn4n\" (UniqueName: \"kubernetes.io/projected/11b94a29-7460-48ad-ab3d-75e200eb6094-kube-api-access-dmn4n\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv"
Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482398 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") "
pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482540 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482605 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482672 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-template-error\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482746 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-audit-policies\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.482810 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-router-certs\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.484419 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.485710 4753 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.551322 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.574112 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583386 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 
17:08:53.583462 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583484 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583522 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-template-error\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583552 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-audit-policies\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583573 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-router-certs\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583600 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/11b94a29-7460-48ad-ab3d-75e200eb6094-audit-dir\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583617 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-session\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583638 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583657 4753 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-service-ca\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583681 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583699 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-template-login\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583719 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.583745 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmn4n\" (UniqueName: \"kubernetes.io/projected/11b94a29-7460-48ad-ab3d-75e200eb6094-kube-api-access-dmn4n\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.584427 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/11b94a29-7460-48ad-ab3d-75e200eb6094-audit-dir\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.584932 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.584935 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-audit-policies\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.586313 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-service-ca\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.586600 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.591955 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-template-error\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.591982 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-router-certs\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.593252 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.593324 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-template-login\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.594569 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.598630 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.601433 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.606104 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/11b94a29-7460-48ad-ab3d-75e200eb6094-v4-0-config-system-session\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.612818 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmn4n\" (UniqueName: \"kubernetes.io/projected/11b94a29-7460-48ad-ab3d-75e200eb6094-kube-api-access-dmn4n\") pod \"oauth-openshift-7986fd6655-kqdsv\" (UID: \"11b94a29-7460-48ad-ab3d-75e200eb6094\") " pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.646029 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.662031 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.820613 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.832280 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.866470 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.881436 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.884215 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.904616 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.906032 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.932737 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.962476 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 17:08:53 crc kubenswrapper[4753]: I1205 17:08:53.982507 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.032015 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 
17:08:54.044794 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.109167 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.112317 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.159418 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.239293 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.264601 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.315313 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.390040 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.399278 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.492818 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.501881 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.510915 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.597422 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.634926 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.635396 4753 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.726875 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.809667 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.887103 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.970741 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.977127 4753 
patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.977219 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.977324 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.978359 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"b888da62eca45b4de4ea434f4382414730b7b2c92618756657b8158f45303700"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 05 17:08:54 crc kubenswrapper[4753]: I1205 17:08:54.978537 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://b888da62eca45b4de4ea434f4382414730b7b2c92618756657b8158f45303700" gracePeriod=30 Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.039481 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.043255 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.066223 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.149402 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.216632 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.238212 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.242576 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.271253 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.453508 4753 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.453855 4753 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f" gracePeriod=5 Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.501735 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.675433 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.743953 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.782649 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.867880 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.926700 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.974559 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 17:08:55 crc kubenswrapper[4753]: I1205 17:08:55.976015 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.003743 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.016185 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.022310 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.150036 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.203326 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.405557 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.414067 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.442292 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.464642 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.760368 4753 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.778874 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 17:08:56 crc kubenswrapper[4753]: I1205 17:08:56.975557 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.143120 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.170359 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.201741 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.403487 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.477118 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.520752 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.583801 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.678017 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.924490 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 17:08:57 crc kubenswrapper[4753]: I1205 17:08:57.932756 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 17:08:58 crc kubenswrapper[4753]: I1205 17:08:58.051958 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 17:08:58 crc kubenswrapper[4753]: I1205 17:08:58.471599 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 17:08:58 crc kubenswrapper[4753]: I1205 17:08:58.682525 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7986fd6655-kqdsv"] Dec 05 17:08:58 crc kubenswrapper[4753]: I1205 17:08:58.841355 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 17:08:58 crc kubenswrapper[4753]: I1205 17:08:58.891106 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7986fd6655-kqdsv"] Dec 05 17:08:58 crc kubenswrapper[4753]: I1205 17:08:58.967263 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 17:08:59 crc kubenswrapper[4753]: I1205 17:08:59.085416 4753 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" event={"ID":"11b94a29-7460-48ad-ab3d-75e200eb6094","Type":"ContainerStarted","Data":"1a3c0277ebdd7e1f944f4f1a3814f5af28cf7adddb5a565f84c47af5b178ee6f"} Dec 05 17:08:59 crc kubenswrapper[4753]: I1205 17:08:59.136488 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 17:08:59 crc kubenswrapper[4753]: I1205 17:08:59.245849 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 17:08:59 crc kubenswrapper[4753]: I1205 17:08:59.575371 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 17:08:59 crc kubenswrapper[4753]: I1205 17:08:59.610470 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 17:09:00 crc kubenswrapper[4753]: I1205 17:09:00.096528 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" event={"ID":"11b94a29-7460-48ad-ab3d-75e200eb6094","Type":"ContainerStarted","Data":"b64d44b2fd186086ba1e0c53b7e9c86dcd8af8bab6c8cd5efc00c65bc4d4071e"} Dec 05 17:09:00 crc kubenswrapper[4753]: I1205 17:09:00.098127 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:09:00 crc kubenswrapper[4753]: I1205 17:09:00.105611 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" Dec 05 17:09:00 crc kubenswrapper[4753]: I1205 17:09:00.130363 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7986fd6655-kqdsv" podStartSLOduration=51.130337548 podStartE2EDuration="51.130337548s" podCreationTimestamp="2025-12-05 17:08:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:09:00.125188966 +0000 UTC m=+278.628295972" watchObservedRunningTime="2025-12-05 17:09:00.130337548 +0000 UTC m=+278.633444554" Dec 05 17:09:00 crc kubenswrapper[4753]: I1205 17:09:00.442969 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 17:09:00 crc kubenswrapper[4753]: I1205 17:09:00.566349 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.064417 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.065095 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.104748 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.104818 4753 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f" exitCode=137 Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.104955 4753 scope.go:117] "RemoveContainer" containerID="085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.105008 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.114271 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.114334 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.114380 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.114390 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.114451 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.114461 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.114522 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.114522 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.114552 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.115352 4753 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.115387 4753 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.115407 4753 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.115425 4753 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.121337 4753 scope.go:117] "RemoveContainer" containerID="085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f" Dec 05 17:09:01 crc kubenswrapper[4753]: E1205 17:09:01.121951 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f\": container with ID starting with 085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f not found: ID does not exist" containerID="085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.122019 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f"} err="failed to get container status \"085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f\": rpc error: code = NotFound desc = could not find container \"085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f\": container with ID starting with 085707f6811b95d4c230f0788450b195d839891d700b60a1241d856c184bc35f not found: ID does not exist" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.126199 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.217541 4753 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:09:01 crc kubenswrapper[4753]: I1205 17:09:01.735477 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 17:09:19 crc kubenswrapper[4753]: I1205 17:09:19.243861 4753 generic.go:334] "Generic (PLEG): container finished" podID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerID="5be7c5960ffb2fc15b84d5390bf5773ad1ba4843c0fb43e769be20a63f92d632" exitCode=0 Dec 05 17:09:19 crc kubenswrapper[4753]: I1205 17:09:19.243977 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" event={"ID":"03fd3f93-fb41-4e32-8276-416c40f2b9a7","Type":"ContainerDied","Data":"5be7c5960ffb2fc15b84d5390bf5773ad1ba4843c0fb43e769be20a63f92d632"} Dec 05 17:09:19 crc kubenswrapper[4753]: I1205 17:09:19.245121 4753 scope.go:117] "RemoveContainer" containerID="5be7c5960ffb2fc15b84d5390bf5773ad1ba4843c0fb43e769be20a63f92d632" Dec 05 17:09:20 crc kubenswrapper[4753]: I1205 17:09:20.258702 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" event={"ID":"03fd3f93-fb41-4e32-8276-416c40f2b9a7","Type":"ContainerStarted","Data":"eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1"} Dec 05 17:09:20 crc kubenswrapper[4753]: I1205 17:09:20.259283 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:09:20 crc kubenswrapper[4753]: I1205 17:09:20.261510 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:09:25 crc kubenswrapper[4753]: I1205 17:09:25.307311 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 05 17:09:25 crc kubenswrapper[4753]: I1205 17:09:25.309683 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 17:09:25 crc kubenswrapper[4753]: I1205 17:09:25.309725 4753 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="b888da62eca45b4de4ea434f4382414730b7b2c92618756657b8158f45303700" exitCode=137 Dec 05 17:09:25 crc kubenswrapper[4753]: I1205 17:09:25.309763 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"b888da62eca45b4de4ea434f4382414730b7b2c92618756657b8158f45303700"} Dec 05 17:09:25 crc kubenswrapper[4753]: I1205 17:09:25.309807 4753 scope.go:117] "RemoveContainer" containerID="0f6e27e109eeb52dae4e4b35bdf84e78f0786fc8d15c2498dd8d8c70c3d64e4d" Dec 05 17:09:26 crc kubenswrapper[4753]: I1205 17:09:26.319092 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log"
Dec 05 17:09:26 crc kubenswrapper[4753]: I1205 17:09:26.320341 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"77c53476753caad572b8d6b1ad4be5a949e305385ece8ea03aca3b40a7e00992"}
Dec 05 17:09:27 crc kubenswrapper[4753]: I1205 17:09:27.760428 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 05 17:09:33 crc kubenswrapper[4753]: I1205 17:09:33.766477 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:09:34 crc kubenswrapper[4753]: I1205 17:09:34.976683 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:09:34 crc kubenswrapper[4753]: I1205 17:09:34.983962 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:09:35 crc kubenswrapper[4753]: I1205 17:09:35.392854 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:09:44 crc kubenswrapper[4753]: I1205 17:09:44.418112 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-b7687"]
Dec 05 17:09:44 crc kubenswrapper[4753]: I1205 17:09:44.419004 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" podUID="915c88c7-cac2-48b3-ab7a-6e23e7240465" containerName="controller-manager" containerID="cri-o://aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544" gracePeriod=30
Dec 05 17:09:44 crc kubenswrapper[4753]: I1205 17:09:44.434991 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"]
Dec 05 17:09:44 crc kubenswrapper[4753]: I1205 17:09:44.435970 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" podUID="0aa048c9-05f2-41f3-ad26-c88d5c98c453" containerName="route-controller-manager" containerID="cri-o://08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08" gracePeriod=30
Dec 05 17:09:44 crc kubenswrapper[4753]: I1205 17:09:44.861192 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"
Dec 05 17:09:44 crc kubenswrapper[4753]: I1205 17:09:44.903469 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.030540 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-config\") pod \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") "
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.031039 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/915c88c7-cac2-48b3-ab7a-6e23e7240465-serving-cert\") pod \"915c88c7-cac2-48b3-ab7a-6e23e7240465\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") "
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.031233 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4p49w\" (UniqueName: \"kubernetes.io/projected/0aa048c9-05f2-41f3-ad26-c88d5c98c453-kube-api-access-4p49w\") pod \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") "
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.031294 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-config\") pod \"915c88c7-cac2-48b3-ab7a-6e23e7240465\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") "
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.031361 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-client-ca\") pod \"915c88c7-cac2-48b3-ab7a-6e23e7240465\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") "
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.031402 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48llr\" (UniqueName: \"kubernetes.io/projected/915c88c7-cac2-48b3-ab7a-6e23e7240465-kube-api-access-48llr\") pod \"915c88c7-cac2-48b3-ab7a-6e23e7240465\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") "
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.031428 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-client-ca\") pod \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") "
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.031458 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aa048c9-05f2-41f3-ad26-c88d5c98c453-serving-cert\") pod \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\" (UID: \"0aa048c9-05f2-41f3-ad26-c88d5c98c453\") "
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.031500 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-proxy-ca-bundles\") pod \"915c88c7-cac2-48b3-ab7a-6e23e7240465\" (UID: \"915c88c7-cac2-48b3-ab7a-6e23e7240465\") "
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.032216 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-config" (OuterVolumeSpecName: "config") pod "0aa048c9-05f2-41f3-ad26-c88d5c98c453" (UID: "0aa048c9-05f2-41f3-ad26-c88d5c98c453"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.032246 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-client-ca" (OuterVolumeSpecName: "client-ca") pod "0aa048c9-05f2-41f3-ad26-c88d5c98c453" (UID: "0aa048c9-05f2-41f3-ad26-c88d5c98c453"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.032265 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-client-ca" (OuterVolumeSpecName: "client-ca") pod "915c88c7-cac2-48b3-ab7a-6e23e7240465" (UID: "915c88c7-cac2-48b3-ab7a-6e23e7240465"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.032349 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "915c88c7-cac2-48b3-ab7a-6e23e7240465" (UID: "915c88c7-cac2-48b3-ab7a-6e23e7240465"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.032344 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-config" (OuterVolumeSpecName: "config") pod "915c88c7-cac2-48b3-ab7a-6e23e7240465" (UID: "915c88c7-cac2-48b3-ab7a-6e23e7240465"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260071 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/915c88c7-cac2-48b3-ab7a-6e23e7240465-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "915c88c7-cac2-48b3-ab7a-6e23e7240465" (UID: "915c88c7-cac2-48b3-ab7a-6e23e7240465"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260097 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0aa048c9-05f2-41f3-ad26-c88d5c98c453-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0aa048c9-05f2-41f3-ad26-c88d5c98c453" (UID: "0aa048c9-05f2-41f3-ad26-c88d5c98c453"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260071 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/915c88c7-cac2-48b3-ab7a-6e23e7240465-kube-api-access-48llr" (OuterVolumeSpecName: "kube-api-access-48llr") pod "915c88c7-cac2-48b3-ab7a-6e23e7240465" (UID: "915c88c7-cac2-48b3-ab7a-6e23e7240465"). InnerVolumeSpecName "kube-api-access-48llr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260648 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48llr\" (UniqueName: \"kubernetes.io/projected/915c88c7-cac2-48b3-ab7a-6e23e7240465-kube-api-access-48llr\") on node \"crc\" DevicePath \"\""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260693 4753 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260710 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aa048c9-05f2-41f3-ad26-c88d5c98c453-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260721 4753 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260733 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0aa048c9-05f2-41f3-ad26-c88d5c98c453-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260746 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/915c88c7-cac2-48b3-ab7a-6e23e7240465-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260756 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.260770 4753 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/915c88c7-cac2-48b3-ab7a-6e23e7240465-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.261363 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0aa048c9-05f2-41f3-ad26-c88d5c98c453-kube-api-access-4p49w" (OuterVolumeSpecName: "kube-api-access-4p49w") pod "0aa048c9-05f2-41f3-ad26-c88d5c98c453" (UID: "0aa048c9-05f2-41f3-ad26-c88d5c98c453"). InnerVolumeSpecName "kube-api-access-4p49w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.362921 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4p49w\" (UniqueName: \"kubernetes.io/projected/0aa048c9-05f2-41f3-ad26-c88d5c98c453-kube-api-access-4p49w\") on node \"crc\" DevicePath \"\""
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.456965 4753 generic.go:334] "Generic (PLEG): container finished" podID="0aa048c9-05f2-41f3-ad26-c88d5c98c453" containerID="08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08" exitCode=0
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.457027 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" event={"ID":"0aa048c9-05f2-41f3-ad26-c88d5c98c453","Type":"ContainerDied","Data":"08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08"}
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.457101 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.457137 4753 scope.go:117] "RemoveContainer" containerID="08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.457115 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd" event={"ID":"0aa048c9-05f2-41f3-ad26-c88d5c98c453","Type":"ContainerDied","Data":"9bc21e751dd52dc6dac679d64edd3ad30539759661f044ac1f8dc69a3d3ccb75"}
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.460394 4753 generic.go:334] "Generic (PLEG): container finished" podID="915c88c7-cac2-48b3-ab7a-6e23e7240465" containerID="aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544" exitCode=0
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.460458 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" event={"ID":"915c88c7-cac2-48b3-ab7a-6e23e7240465","Type":"ContainerDied","Data":"aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544"}
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.460502 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687" event={"ID":"915c88c7-cac2-48b3-ab7a-6e23e7240465","Type":"ContainerDied","Data":"11305d7e0499230bfc12453a58ac34d14e8072c0f05aa34ab17d0874546357dd"}
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.460593 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-b7687"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.475668 4753 scope.go:117] "RemoveContainer" containerID="08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08"
Dec 05 17:09:45 crc kubenswrapper[4753]: E1205 17:09:45.476106 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08\": container with ID starting with 08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08 not found: ID does not exist" containerID="08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.476165 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08"} err="failed to get container status \"08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08\": rpc error: code = NotFound desc = could not find container \"08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08\": container with ID starting with 08081215dda0c7592b9709febd23a757a76cff31840f88cf1829bf956a6a6a08 not found: ID does not exist"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.476198 4753 scope.go:117] "RemoveContainer" containerID="aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.495806 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"]
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.496235 4753 scope.go:117] "RemoveContainer" containerID="aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544"
Dec 05 17:09:45 crc kubenswrapper[4753]: E1205 17:09:45.498181 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544\": container with ID starting with aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544 not found: ID does not exist" containerID="aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.498238 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544"} err="failed to get container status \"aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544\": rpc error: code = NotFound desc = could not find container \"aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544\": container with ID starting with aca4075916601dd74461fb12a7e864057dce2809c67c6d004bc051852e118544 not found: ID does not exist"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.501355 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-7zfzd"]
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.518479 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-b7687"]
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.526489 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-b7687"]
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.734784 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0aa048c9-05f2-41f3-ad26-c88d5c98c453" path="/var/lib/kubelet/pods/0aa048c9-05f2-41f3-ad26-c88d5c98c453/volumes"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.735454 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="915c88c7-cac2-48b3-ab7a-6e23e7240465" path="/var/lib/kubelet/pods/915c88c7-cac2-48b3-ab7a-6e23e7240465/volumes"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.994571 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"]
Dec 05 17:09:45 crc kubenswrapper[4753]: E1205 17:09:45.995594 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.995619 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 17:09:45 crc kubenswrapper[4753]: E1205 17:09:45.995631 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="915c88c7-cac2-48b3-ab7a-6e23e7240465" containerName="controller-manager"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.995641 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="915c88c7-cac2-48b3-ab7a-6e23e7240465" containerName="controller-manager"
Dec 05 17:09:45 crc kubenswrapper[4753]: E1205 17:09:45.995664 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0aa048c9-05f2-41f3-ad26-c88d5c98c453" containerName="route-controller-manager"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.995676 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="0aa048c9-05f2-41f3-ad26-c88d5c98c453" containerName="route-controller-manager"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.995940 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="0aa048c9-05f2-41f3-ad26-c88d5c98c453" containerName="route-controller-manager"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.995983 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="915c88c7-cac2-48b3-ab7a-6e23e7240465" containerName="controller-manager"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.996004 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Dec 05 17:09:45 crc kubenswrapper[4753]: I1205 17:09:45.996757 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.001033 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.001939 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.002331 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.002423 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.002534 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.002676 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.006490 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6bb55b86b7-942dc"]
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.009997 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.013971 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.014358 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.015983 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.016089 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.016447 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.017736 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.029482 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6bb55b86b7-942dc"]
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.037651 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"]
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.038688 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.173842 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74502a8a-c734-4cd6-9687-2c4afbfa763e-serving-cert\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.173907 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-config\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.173998 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gz4q\" (UniqueName: \"kubernetes.io/projected/debd2d03-1310-4ac8-8555-90c54eb307a4-kube-api-access-5gz4q\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.174020 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/debd2d03-1310-4ac8-8555-90c54eb307a4-serving-cert\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.174056 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-config\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.174077 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-client-ca\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.174099 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-proxy-ca-bundles\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.174123 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsgzk\" (UniqueName: \"kubernetes.io/projected/74502a8a-c734-4cd6-9687-2c4afbfa763e-kube-api-access-fsgzk\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.174206 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-client-ca\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.275336 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-client-ca\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.275627 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74502a8a-c734-4cd6-9687-2c4afbfa763e-serving-cert\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.275660 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-config\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.275703 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gz4q\" (UniqueName: \"kubernetes.io/projected/debd2d03-1310-4ac8-8555-90c54eb307a4-kube-api-access-5gz4q\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.275729 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/debd2d03-1310-4ac8-8555-90c54eb307a4-serving-cert\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.275756 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-config\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.275777 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-client-ca\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.275798 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-proxy-ca-bundles\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.275816 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsgzk\" (UniqueName: \"kubernetes.io/projected/74502a8a-c734-4cd6-9687-2c4afbfa763e-kube-api-access-fsgzk\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.277540 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-config\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.277561 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-config\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.277639 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-client-ca\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.277816 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-client-ca\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.278760 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-proxy-ca-bundles\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.287299 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74502a8a-c734-4cd6-9687-2c4afbfa763e-serving-cert\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.287341 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/debd2d03-1310-4ac8-8555-90c54eb307a4-serving-cert\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.295345 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsgzk\" (UniqueName: \"kubernetes.io/projected/74502a8a-c734-4cd6-9687-2c4afbfa763e-kube-api-access-fsgzk\") pod \"route-controller-manager-fd4c4bf8c-lcwm4\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") " pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.300068 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gz4q\" (UniqueName: \"kubernetes.io/projected/debd2d03-1310-4ac8-8555-90c54eb307a4-kube-api-access-5gz4q\") pod \"controller-manager-6bb55b86b7-942dc\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") " pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.328437 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.340019 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.692434 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6bb55b86b7-942dc"]
Dec 05 17:09:46 crc kubenswrapper[4753]: I1205 17:09:46.851589 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"]
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.512542 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4" event={"ID":"74502a8a-c734-4cd6-9687-2c4afbfa763e","Type":"ContainerStarted","Data":"9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4"}
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.512920 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4" event={"ID":"74502a8a-c734-4cd6-9687-2c4afbfa763e","Type":"ContainerStarted","Data":"9c1a93b0725b1cf085a3abb451ce6f065f2cf03ad47437c8968c372e03a6127b"}
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.513560 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.515294 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc" event={"ID":"debd2d03-1310-4ac8-8555-90c54eb307a4","Type":"ContainerStarted","Data":"64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915"}
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.515319 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc" event={"ID":"debd2d03-1310-4ac8-8555-90c54eb307a4","Type":"ContainerStarted","Data":"b18f24b462f7d3cb92ac6c66f7ef664d385116f70a6e7fb4b0cf971180fde9e3"}
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.515922 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.523233 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.538815 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.539087 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4" podStartSLOduration=3.539071476 podStartE2EDuration="3.539071476s" podCreationTimestamp="2025-12-05 17:09:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:09:47.536049516 +0000 UTC m=+326.039156522" watchObservedRunningTime="2025-12-05 17:09:47.539071476 +0000 UTC m=+326.042178482"
Dec 05 17:09:47 crc kubenswrapper[4753]: I1205 17:09:47.563093 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc" podStartSLOduration=3.563070926 podStartE2EDuration="3.563070926s" podCreationTimestamp="2025-12-05 17:09:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:09:47.556500232 +0000 UTC m=+326.059607238" watchObservedRunningTime="2025-12-05 17:09:47.563070926 +0000 UTC m=+326.066177932"
Dec 05 17:10:10 crc kubenswrapper[4753]: I1205 17:10:10.928418 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-j7hx9"]
Dec 05 17:10:10 crc kubenswrapper[4753]: I1205 17:10:10.930290 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:10 crc kubenswrapper[4753]: I1205 17:10:10.949716 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-j7hx9"]
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.057691 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/920fee85-8e60-41d5-b3ba-5b61607972fd-trusted-ca\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.057772 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/920fee85-8e60-41d5-b3ba-5b61607972fd-bound-sa-token\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.057813 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/920fee85-8e60-41d5-b3ba-5b61607972fd-registry-tls\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.057850 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/920fee85-8e60-41d5-b3ba-5b61607972fd-installation-pull-secrets\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.057898 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7c8c\" (UniqueName: \"kubernetes.io/projected/920fee85-8e60-41d5-b3ba-5b61607972fd-kube-api-access-x7c8c\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.057943 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.058031 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/920fee85-8e60-41d5-b3ba-5b61607972fd-registry-certificates\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.058163 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/920fee85-8e60-41d5-b3ba-5b61607972fd-ca-trust-extracted\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.084783 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.160454 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/920fee85-8e60-41d5-b3ba-5b61607972fd-trusted-ca\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.160583 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/920fee85-8e60-41d5-b3ba-5b61607972fd-bound-sa-token\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.160623 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/920fee85-8e60-41d5-b3ba-5b61607972fd-registry-tls\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.160668 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/920fee85-8e60-41d5-b3ba-5b61607972fd-installation-pull-secrets\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.160722 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7c8c\" (UniqueName: \"kubernetes.io/projected/920fee85-8e60-41d5-b3ba-5b61607972fd-kube-api-access-x7c8c\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.160763 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/920fee85-8e60-41d5-b3ba-5b61607972fd-registry-certificates\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.160802 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/920fee85-8e60-41d5-b3ba-5b61607972fd-ca-trust-extracted\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.161588 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/920fee85-8e60-41d5-b3ba-5b61607972fd-ca-trust-extracted\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.162102 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/920fee85-8e60-41d5-b3ba-5b61607972fd-registry-certificates\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.162570 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/920fee85-8e60-41d5-b3ba-5b61607972fd-trusted-ca\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.166965 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/920fee85-8e60-41d5-b3ba-5b61607972fd-registry-tls\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.166977 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/920fee85-8e60-41d5-b3ba-5b61607972fd-installation-pull-secrets\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.176932 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/920fee85-8e60-41d5-b3ba-5b61607972fd-bound-sa-token\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.179180 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7c8c\" (UniqueName: \"kubernetes.io/projected/920fee85-8e60-41d5-b3ba-5b61607972fd-kube-api-access-x7c8c\") pod \"image-registry-66df7c8f76-j7hx9\" (UID: \"920fee85-8e60-41d5-b3ba-5b61607972fd\") " pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.250718 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:11 crc kubenswrapper[4753]: I1205 17:10:11.801641 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-j7hx9"]
Dec 05 17:10:12 crc kubenswrapper[4753]: I1205 17:10:12.682594 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9" event={"ID":"920fee85-8e60-41d5-b3ba-5b61607972fd","Type":"ContainerStarted","Data":"d5ad3cbb25f5236bafce82f134060e6ce0194ac0ea15e23329b13ad53c4d19ac"}
Dec 05 17:10:12 crc kubenswrapper[4753]: I1205 17:10:12.683198 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9" event={"ID":"920fee85-8e60-41d5-b3ba-5b61607972fd","Type":"ContainerStarted","Data":"ddba8afad784f85d550fd70a68416fd9a671e61fe093212afc0b4f72b3cde32f"}
Dec 05 17:10:12 crc kubenswrapper[4753]: I1205 17:10:12.683227 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:12 crc kubenswrapper[4753]: I1205 17:10:12.705653 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9" podStartSLOduration=2.705615628 podStartE2EDuration="2.705615628s" podCreationTimestamp="2025-12-05 17:10:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:10:12.701080136 +0000 UTC m=+351.204187172" watchObservedRunningTime="2025-12-05 17:10:12.705615628 +0000 UTC m=+351.208722634"
Dec 05 17:10:28 crc kubenswrapper[4753]: I1205 17:10:28.788521 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6bb55b86b7-942dc"]
Dec 05 17:10:28 crc kubenswrapper[4753]: I1205 17:10:28.789384 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc" podUID="debd2d03-1310-4ac8-8555-90c54eb307a4" containerName="controller-manager" containerID="cri-o://64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915" gracePeriod=30
Dec 05 17:10:28 crc kubenswrapper[4753]: I1205 17:10:28.879476 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"]
Dec 05 17:10:28 crc kubenswrapper[4753]: I1205 17:10:28.882373 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4" podUID="74502a8a-c734-4cd6-9687-2c4afbfa763e" containerName="route-controller-manager" containerID="cri-o://9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4" gracePeriod=30
Dec 05 17:10:28 crc kubenswrapper[4753]: I1205 17:10:28.978805 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:10:28 crc kubenswrapper[4753]: I1205 17:10:28.979218 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.234029 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.287408 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.413529 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsgzk\" (UniqueName: \"kubernetes.io/projected/74502a8a-c734-4cd6-9687-2c4afbfa763e-kube-api-access-fsgzk\") pod \"74502a8a-c734-4cd6-9687-2c4afbfa763e\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") "
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.413625 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-proxy-ca-bundles\") pod \"debd2d03-1310-4ac8-8555-90c54eb307a4\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") "
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.413673 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/debd2d03-1310-4ac8-8555-90c54eb307a4-serving-cert\") pod \"debd2d03-1310-4ac8-8555-90c54eb307a4\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") "
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.413698 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-client-ca\") pod \"74502a8a-c734-4cd6-9687-2c4afbfa763e\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") "
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.413727 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gz4q\" (UniqueName: \"kubernetes.io/projected/debd2d03-1310-4ac8-8555-90c54eb307a4-kube-api-access-5gz4q\") pod \"debd2d03-1310-4ac8-8555-90c54eb307a4\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") "
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.413797 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-client-ca\") pod \"debd2d03-1310-4ac8-8555-90c54eb307a4\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") "
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.413849 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74502a8a-c734-4cd6-9687-2c4afbfa763e-serving-cert\") pod \"74502a8a-c734-4cd6-9687-2c4afbfa763e\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") "
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.413876 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-config\") pod \"74502a8a-c734-4cd6-9687-2c4afbfa763e\" (UID: \"74502a8a-c734-4cd6-9687-2c4afbfa763e\") "
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.413927 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-config\") pod \"debd2d03-1310-4ac8-8555-90c54eb307a4\" (UID: \"debd2d03-1310-4ac8-8555-90c54eb307a4\") "
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.415582 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-client-ca" (OuterVolumeSpecName: "client-ca") pod "debd2d03-1310-4ac8-8555-90c54eb307a4" (UID: "debd2d03-1310-4ac8-8555-90c54eb307a4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.415841 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "debd2d03-1310-4ac8-8555-90c54eb307a4" (UID: "debd2d03-1310-4ac8-8555-90c54eb307a4"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.416210 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-config" (OuterVolumeSpecName: "config") pod "74502a8a-c734-4cd6-9687-2c4afbfa763e" (UID: "74502a8a-c734-4cd6-9687-2c4afbfa763e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.416472 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-config" (OuterVolumeSpecName: "config") pod "debd2d03-1310-4ac8-8555-90c54eb307a4" (UID: "debd2d03-1310-4ac8-8555-90c54eb307a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.418362 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-client-ca" (OuterVolumeSpecName: "client-ca") pod "74502a8a-c734-4cd6-9687-2c4afbfa763e" (UID: "74502a8a-c734-4cd6-9687-2c4afbfa763e"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.421713 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/debd2d03-1310-4ac8-8555-90c54eb307a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "debd2d03-1310-4ac8-8555-90c54eb307a4" (UID: "debd2d03-1310-4ac8-8555-90c54eb307a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.423030 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/debd2d03-1310-4ac8-8555-90c54eb307a4-kube-api-access-5gz4q" (OuterVolumeSpecName: "kube-api-access-5gz4q") pod "debd2d03-1310-4ac8-8555-90c54eb307a4" (UID: "debd2d03-1310-4ac8-8555-90c54eb307a4"). InnerVolumeSpecName "kube-api-access-5gz4q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.423382 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74502a8a-c734-4cd6-9687-2c4afbfa763e-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "74502a8a-c734-4cd6-9687-2c4afbfa763e" (UID: "74502a8a-c734-4cd6-9687-2c4afbfa763e"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.425445 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74502a8a-c734-4cd6-9687-2c4afbfa763e-kube-api-access-fsgzk" (OuterVolumeSpecName: "kube-api-access-fsgzk") pod "74502a8a-c734-4cd6-9687-2c4afbfa763e" (UID: "74502a8a-c734-4cd6-9687-2c4afbfa763e"). InnerVolumeSpecName "kube-api-access-fsgzk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.515869 4753 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.516430 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74502a8a-c734-4cd6-9687-2c4afbfa763e-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.516534 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.516619 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.516707 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsgzk\" (UniqueName: \"kubernetes.io/projected/74502a8a-c734-4cd6-9687-2c4afbfa763e-kube-api-access-fsgzk\") on node \"crc\" DevicePath \"\""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.516802 4753 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/debd2d03-1310-4ac8-8555-90c54eb307a4-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.516895 4753 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/debd2d03-1310-4ac8-8555-90c54eb307a4-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.517011 4753 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74502a8a-c734-4cd6-9687-2c4afbfa763e-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.517112 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gz4q\" (UniqueName: \"kubernetes.io/projected/debd2d03-1310-4ac8-8555-90c54eb307a4-kube-api-access-5gz4q\") on node \"crc\" DevicePath \"\""
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.803980 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dgpzc"]
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.804451 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-dgpzc" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" containerName="registry-server" containerID="cri-o://f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914" gracePeriod=30
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.829506 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cb4ng"]
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.830005 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cb4ng" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerName="registry-server" containerID="cri-o://905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045" gracePeriod=30
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.847095 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s87ds"]
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.847681 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerName="marketplace-operator" containerID="cri-o://eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1" gracePeriod=30
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.849463 4753 generic.go:334] "Generic (PLEG): container finished" podID="74502a8a-c734-4cd6-9687-2c4afbfa763e" containerID="9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4" exitCode=0
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.849595 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4" event={"ID":"74502a8a-c734-4cd6-9687-2c4afbfa763e","Type":"ContainerDied","Data":"9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4"}
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.849628 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4" event={"ID":"74502a8a-c734-4cd6-9687-2c4afbfa763e","Type":"ContainerDied","Data":"9c1a93b0725b1cf085a3abb451ce6f065f2cf03ad47437c8968c372e03a6127b"}
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.849649 4753 scope.go:117] "RemoveContainer" containerID="9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.849760 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.861555 4753 generic.go:334] "Generic (PLEG): container finished" podID="debd2d03-1310-4ac8-8555-90c54eb307a4" containerID="64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915" exitCode=0
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.861728 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc" event={"ID":"debd2d03-1310-4ac8-8555-90c54eb307a4","Type":"ContainerDied","Data":"64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915"}
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.861828 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc" event={"ID":"debd2d03-1310-4ac8-8555-90c54eb307a4","Type":"ContainerDied","Data":"b18f24b462f7d3cb92ac6c66f7ef664d385116f70a6e7fb4b0cf971180fde9e3"}
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.861974 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6bb55b86b7-942dc"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.878417 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4kqm"]
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.878771 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-c4kqm" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerName="registry-server" containerID="cri-o://1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7" gracePeriod=30
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.895250 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6ggrp"]
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.895731 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6ggrp" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerName="registry-server" containerID="cri-o://1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4" gracePeriod=30
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.911494 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rtl5"]
Dec 05 17:10:29 crc kubenswrapper[4753]: E1205 17:10:29.911785 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="debd2d03-1310-4ac8-8555-90c54eb307a4" containerName="controller-manager"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.911803 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="debd2d03-1310-4ac8-8555-90c54eb307a4" containerName="controller-manager"
Dec 05 17:10:29 crc kubenswrapper[4753]: E1205 17:10:29.911814 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74502a8a-c734-4cd6-9687-2c4afbfa763e" containerName="route-controller-manager"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.911823 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="74502a8a-c734-4cd6-9687-2c4afbfa763e" containerName="route-controller-manager"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.911929 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="74502a8a-c734-4cd6-9687-2c4afbfa763e" containerName="route-controller-manager"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.911950 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="debd2d03-1310-4ac8-8555-90c54eb307a4" containerName="controller-manager"
Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.912581 4753 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.923821 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rtl5"] Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.947238 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"] Dec 05 17:10:29 crc kubenswrapper[4753]: I1205 17:10:29.951535 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-fd4c4bf8c-lcwm4"] Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.021484 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-697546d85-w8bh9"] Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.022939 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.026936 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbvm2\" (UniqueName: \"kubernetes.io/projected/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-kube-api-access-sbvm2\") pod \"marketplace-operator-79b997595-8rtl5\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.027064 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8rtl5\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.027176 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8rtl5\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.030811 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.032254 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq"] Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.033276 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.034717 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-697546d85-w8bh9"] Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.037403 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.037851 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.037959 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.038137 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.038481 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.038706 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.039277 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.041096 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.041115 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.042411 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.043621 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.046476 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.066326 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq"] Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.128506 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/75da1bbe-de00-4674-a99f-0ef291146c5f-proxy-ca-bundles\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.129080 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92261e30-74b9-4603-a401-43f1c7baa54b-client-ca\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: 
\"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.129137 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbvm2\" (UniqueName: \"kubernetes.io/projected/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-kube-api-access-sbvm2\") pod \"marketplace-operator-79b997595-8rtl5\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.129751 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8rtl5\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.129798 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkp6b\" (UniqueName: \"kubernetes.io/projected/75da1bbe-de00-4674-a99f-0ef291146c5f-kube-api-access-bkp6b\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.129848 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75da1bbe-de00-4674-a99f-0ef291146c5f-client-ca\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.129900 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75da1bbe-de00-4674-a99f-0ef291146c5f-serving-cert\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.129930 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48zc6\" (UniqueName: \"kubernetes.io/projected/92261e30-74b9-4603-a401-43f1c7baa54b-kube-api-access-48zc6\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.129968 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8rtl5\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.130024 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92261e30-74b9-4603-a401-43f1c7baa54b-serving-cert\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: 
\"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.130062 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75da1bbe-de00-4674-a99f-0ef291146c5f-config\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.130110 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92261e30-74b9-4603-a401-43f1c7baa54b-config\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.134038 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-8rtl5\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.144466 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-8rtl5\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.149775 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbvm2\" (UniqueName: \"kubernetes.io/projected/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-kube-api-access-sbvm2\") pod \"marketplace-operator-79b997595-8rtl5\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") " pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.221776 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.222961 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6bb55b86b7-942dc"] Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.231322 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92261e30-74b9-4603-a401-43f1c7baa54b-serving-cert\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.231366 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75da1bbe-de00-4674-a99f-0ef291146c5f-config\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.231398 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92261e30-74b9-4603-a401-43f1c7baa54b-config\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.231427 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/75da1bbe-de00-4674-a99f-0ef291146c5f-proxy-ca-bundles\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.231449 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92261e30-74b9-4603-a401-43f1c7baa54b-client-ca\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.231473 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkp6b\" (UniqueName: \"kubernetes.io/projected/75da1bbe-de00-4674-a99f-0ef291146c5f-kube-api-access-bkp6b\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.231500 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75da1bbe-de00-4674-a99f-0ef291146c5f-client-ca\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.231522 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75da1bbe-de00-4674-a99f-0ef291146c5f-serving-cert\") pod \"controller-manager-697546d85-w8bh9\" (UID: 
\"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.231540 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48zc6\" (UniqueName: \"kubernetes.io/projected/92261e30-74b9-4603-a401-43f1c7baa54b-kube-api-access-48zc6\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.233280 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/92261e30-74b9-4603-a401-43f1c7baa54b-client-ca\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.233278 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/75da1bbe-de00-4674-a99f-0ef291146c5f-proxy-ca-bundles\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.234006 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/75da1bbe-de00-4674-a99f-0ef291146c5f-client-ca\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.237234 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75da1bbe-de00-4674-a99f-0ef291146c5f-serving-cert\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.237288 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/92261e30-74b9-4603-a401-43f1c7baa54b-config\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.238785 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6bb55b86b7-942dc"] Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.239516 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75da1bbe-de00-4674-a99f-0ef291146c5f-config\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.244363 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/92261e30-74b9-4603-a401-43f1c7baa54b-serving-cert\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " 
pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.248868 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48zc6\" (UniqueName: \"kubernetes.io/projected/92261e30-74b9-4603-a401-43f1c7baa54b-kube-api-access-48zc6\") pod \"route-controller-manager-b88ff9559-jrwxq\" (UID: \"92261e30-74b9-4603-a401-43f1c7baa54b\") " pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.249188 4753 scope.go:117] "RemoveContainer" containerID="9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4" Dec 05 17:10:30 crc kubenswrapper[4753]: E1205 17:10:30.250433 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4\": container with ID starting with 9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4 not found: ID does not exist" containerID="9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.250485 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4"} err="failed to get container status \"9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4\": rpc error: code = NotFound desc = could not find container \"9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4\": container with ID starting with 9803a350efc9bdb34357e8bee1f6df8f49cfe2b7dc6456d03a34a837bb7c9dc4 not found: ID does not exist" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.250523 4753 scope.go:117] "RemoveContainer" containerID="64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.250866 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.255653 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkp6b\" (UniqueName: \"kubernetes.io/projected/75da1bbe-de00-4674-a99f-0ef291146c5f-kube-api-access-bkp6b\") pod \"controller-manager-697546d85-w8bh9\" (UID: \"75da1bbe-de00-4674-a99f-0ef291146c5f\") " pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.302829 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.309728 4753 scope.go:117] "RemoveContainer" containerID="64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915" Dec 05 17:10:30 crc kubenswrapper[4753]: E1205 17:10:30.310310 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915\": container with ID starting with 64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915 not found: ID does not exist" containerID="64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.310347 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915"} err="failed to get container status \"64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915\": rpc error: code = NotFound desc = could not find container \"64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915\": container with ID starting with 64482c92af3e7c071f86c9f7c660375c435131883c50b1b83cbb06498692b915 not found: ID does not exist" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.393083 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.399380 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.405858 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.439948 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-utilities\") pod \"5560048a-c9e9-4743-9573-1b58a2240c29\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.440081 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nw7t\" (UniqueName: \"kubernetes.io/projected/5560048a-c9e9-4743-9573-1b58a2240c29-kube-api-access-5nw7t\") pod \"5560048a-c9e9-4743-9573-1b58a2240c29\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.440149 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-catalog-content\") pod \"5560048a-c9e9-4743-9573-1b58a2240c29\" (UID: \"5560048a-c9e9-4743-9573-1b58a2240c29\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.440215 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-utilities\") pod \"db1dcb29-b751-4a77-8f2e-68efadf955b9\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.440266 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnxg8\" (UniqueName: \"kubernetes.io/projected/db1dcb29-b751-4a77-8f2e-68efadf955b9-kube-api-access-xnxg8\") pod \"db1dcb29-b751-4a77-8f2e-68efadf955b9\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.440351 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-catalog-content\") pod \"db1dcb29-b751-4a77-8f2e-68efadf955b9\" (UID: \"db1dcb29-b751-4a77-8f2e-68efadf955b9\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.442373 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-utilities" (OuterVolumeSpecName: "utilities") pod "db1dcb29-b751-4a77-8f2e-68efadf955b9" (UID: "db1dcb29-b751-4a77-8f2e-68efadf955b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.442774 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-utilities" (OuterVolumeSpecName: "utilities") pod "5560048a-c9e9-4743-9573-1b58a2240c29" (UID: "5560048a-c9e9-4743-9573-1b58a2240c29"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.445915 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5560048a-c9e9-4743-9573-1b58a2240c29-kube-api-access-5nw7t" (OuterVolumeSpecName: "kube-api-access-5nw7t") pod "5560048a-c9e9-4743-9573-1b58a2240c29" (UID: "5560048a-c9e9-4743-9573-1b58a2240c29"). InnerVolumeSpecName "kube-api-access-5nw7t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.453517 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db1dcb29-b751-4a77-8f2e-68efadf955b9-kube-api-access-xnxg8" (OuterVolumeSpecName: "kube-api-access-xnxg8") pod "db1dcb29-b751-4a77-8f2e-68efadf955b9" (UID: "db1dcb29-b751-4a77-8f2e-68efadf955b9"). InnerVolumeSpecName "kube-api-access-xnxg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.460588 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.460617 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nw7t\" (UniqueName: \"kubernetes.io/projected/5560048a-c9e9-4743-9573-1b58a2240c29-kube-api-access-5nw7t\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.460632 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.460663 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnxg8\" (UniqueName: \"kubernetes.io/projected/db1dcb29-b751-4a77-8f2e-68efadf955b9-kube-api-access-xnxg8\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.506028 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "db1dcb29-b751-4a77-8f2e-68efadf955b9" (UID: "db1dcb29-b751-4a77-8f2e-68efadf955b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.533349 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.538449 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.554770 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5560048a-c9e9-4743-9573-1b58a2240c29" (UID: "5560048a-c9e9-4743-9573-1b58a2240c29"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.562570 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rphmt\" (UniqueName: \"kubernetes.io/projected/03fd3f93-fb41-4e32-8276-416c40f2b9a7-kube-api-access-rphmt\") pod \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.562629 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-catalog-content\") pod \"36b2ef01-2c7a-4313-a03a-be77a660d987\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.562662 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-catalog-content\") pod \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.562711 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v49dc\" (UniqueName: \"kubernetes.io/projected/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-kube-api-access-v49dc\") pod \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.562756 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-operator-metrics\") pod \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.562854 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-trusted-ca\") pod \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\" (UID: \"03fd3f93-fb41-4e32-8276-416c40f2b9a7\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.562884 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-utilities\") pod \"36b2ef01-2c7a-4313-a03a-be77a660d987\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.562917 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-utilities\") pod \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\" (UID: \"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.562955 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqs4q\" (UniqueName: \"kubernetes.io/projected/36b2ef01-2c7a-4313-a03a-be77a660d987-kube-api-access-fqs4q\") pod \"36b2ef01-2c7a-4313-a03a-be77a660d987\" (UID: \"36b2ef01-2c7a-4313-a03a-be77a660d987\") " Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.563487 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db1dcb29-b751-4a77-8f2e-68efadf955b9-catalog-content\") on node 
\"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.563507 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5560048a-c9e9-4743-9573-1b58a2240c29-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.564066 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "03fd3f93-fb41-4e32-8276-416c40f2b9a7" (UID: "03fd3f93-fb41-4e32-8276-416c40f2b9a7"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.564293 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-utilities" (OuterVolumeSpecName: "utilities") pod "ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" (UID: "ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.568331 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-kube-api-access-v49dc" (OuterVolumeSpecName: "kube-api-access-v49dc") pod "ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" (UID: "ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b"). InnerVolumeSpecName "kube-api-access-v49dc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.568632 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03fd3f93-fb41-4e32-8276-416c40f2b9a7-kube-api-access-rphmt" (OuterVolumeSpecName: "kube-api-access-rphmt") pod "03fd3f93-fb41-4e32-8276-416c40f2b9a7" (UID: "03fd3f93-fb41-4e32-8276-416c40f2b9a7"). InnerVolumeSpecName "kube-api-access-rphmt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.569302 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36b2ef01-2c7a-4313-a03a-be77a660d987-kube-api-access-fqs4q" (OuterVolumeSpecName: "kube-api-access-fqs4q") pod "36b2ef01-2c7a-4313-a03a-be77a660d987" (UID: "36b2ef01-2c7a-4313-a03a-be77a660d987"). InnerVolumeSpecName "kube-api-access-fqs4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.570573 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "03fd3f93-fb41-4e32-8276-416c40f2b9a7" (UID: "03fd3f93-fb41-4e32-8276-416c40f2b9a7"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.575329 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-utilities" (OuterVolumeSpecName: "utilities") pod "36b2ef01-2c7a-4313-a03a-be77a660d987" (UID: "36b2ef01-2c7a-4313-a03a-be77a660d987"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.591142 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" (UID: "ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.664641 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.665197 4753 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.665216 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.665231 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqs4q\" (UniqueName: \"kubernetes.io/projected/36b2ef01-2c7a-4313-a03a-be77a660d987-kube-api-access-fqs4q\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.665243 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rphmt\" (UniqueName: \"kubernetes.io/projected/03fd3f93-fb41-4e32-8276-416c40f2b9a7-kube-api-access-rphmt\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.665258 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.665270 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v49dc\" (UniqueName: \"kubernetes.io/projected/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b-kube-api-access-v49dc\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.665301 4753 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/03fd3f93-fb41-4e32-8276-416c40f2b9a7-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.713839 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "36b2ef01-2c7a-4313-a03a-be77a660d987" (UID: "36b2ef01-2c7a-4313-a03a-be77a660d987"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.750678 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-697546d85-w8bh9"] Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.767960 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/36b2ef01-2c7a-4313-a03a-be77a660d987-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.789679 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rtl5"] Dec 05 17:10:30 crc kubenswrapper[4753]: W1205 17:10:30.794652 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92261e30_74b9_4603_a401_43f1c7baa54b.slice/crio-2889998dcb0559dbded873c9372906ee9b9e92b38af412955f67683ac79e715a WatchSource:0}: Error finding container 2889998dcb0559dbded873c9372906ee9b9e92b38af412955f67683ac79e715a: Status 404 returned error can't find the container with id 2889998dcb0559dbded873c9372906ee9b9e92b38af412955f67683ac79e715a Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.794681 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq"] Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.877942 4753 generic.go:334] "Generic (PLEG): container finished" podID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerID="905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045" exitCode=0 Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.878058 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cb4ng" event={"ID":"db1dcb29-b751-4a77-8f2e-68efadf955b9","Type":"ContainerDied","Data":"905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.878109 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cb4ng" event={"ID":"db1dcb29-b751-4a77-8f2e-68efadf955b9","Type":"ContainerDied","Data":"7f1e6bfaf13c4959842c0f22c4aa4f4a2edb8f397a72edb5b610fe6797187a5c"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.878139 4753 scope.go:117] "RemoveContainer" containerID="905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.878180 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cb4ng" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.884093 4753 generic.go:334] "Generic (PLEG): container finished" podID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerID="1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4" exitCode=0 Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.884261 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ggrp" event={"ID":"36b2ef01-2c7a-4313-a03a-be77a660d987","Type":"ContainerDied","Data":"1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.884301 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6ggrp" event={"ID":"36b2ef01-2c7a-4313-a03a-be77a660d987","Type":"ContainerDied","Data":"fa0e729857e34deff1436c162cff22b0b1de2a3b947841d6ad007e5ea894f594"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.884443 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6ggrp" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.892252 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" event={"ID":"75da1bbe-de00-4674-a99f-0ef291146c5f","Type":"ContainerStarted","Data":"1caa7b4127289bc82327efa4091ab0506640c09d85312e90ba65778495cadd50"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.900349 4753 generic.go:334] "Generic (PLEG): container finished" podID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerID="1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7" exitCode=0 Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.900412 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4kqm" event={"ID":"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b","Type":"ContainerDied","Data":"1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.900440 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-c4kqm" event={"ID":"ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b","Type":"ContainerDied","Data":"c0625d794b7997243e6de75dbfbd2ec5763c1d9b21a8af3ff0dc318ffb1bbf6a"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.900547 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-c4kqm" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.904887 4753 generic.go:334] "Generic (PLEG): container finished" podID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerID="eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1" exitCode=0 Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.904960 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" event={"ID":"03fd3f93-fb41-4e32-8276-416c40f2b9a7","Type":"ContainerDied","Data":"eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.904989 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" event={"ID":"03fd3f93-fb41-4e32-8276-416c40f2b9a7","Type":"ContainerDied","Data":"6f743b9240b7cd5a1999f9913cfc814045eed2770b705f12f3dc561423e322c8"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.905078 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-s87ds" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.908855 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" event={"ID":"92261e30-74b9-4603-a401-43f1c7baa54b","Type":"ContainerStarted","Data":"2889998dcb0559dbded873c9372906ee9b9e92b38af412955f67683ac79e715a"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.915307 4753 generic.go:334] "Generic (PLEG): container finished" podID="5560048a-c9e9-4743-9573-1b58a2240c29" containerID="f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914" exitCode=0 Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.915386 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgpzc" event={"ID":"5560048a-c9e9-4743-9573-1b58a2240c29","Type":"ContainerDied","Data":"f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.915418 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-dgpzc" event={"ID":"5560048a-c9e9-4743-9573-1b58a2240c29","Type":"ContainerDied","Data":"09458331ac649eca958921cb2fd470111657af9ec584895fcfc99390f4803626"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.915599 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-dgpzc" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.920867 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" event={"ID":"7aeabdb9-1aef-44d2-85e5-c17fcb4290be","Type":"ContainerStarted","Data":"fac12ea0b48abf0e0bca39820cd1c83a170d329d13d4f533756917881749a51a"} Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.933106 4753 scope.go:117] "RemoveContainer" containerID="331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d" Dec 05 17:10:30 crc kubenswrapper[4753]: I1205 17:10:30.981718 4753 scope.go:117] "RemoveContainer" containerID="c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.046883 4753 scope.go:117] "RemoveContainer" containerID="905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045" Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.051503 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045\": container with ID starting with 905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045 not found: ID does not exist" containerID="905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.051595 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045"} err="failed to get container status \"905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045\": rpc error: code = NotFound desc = could not find container \"905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045\": container with ID starting with 905f329b9e0d4b8141b862cca0e0475439f65fc03c4b986da1771fc05b0b3045 not found: ID does not exist" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.051646 4753 scope.go:117] "RemoveContainer" containerID="331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d" Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.053818 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d\": container with ID starting with 331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d not found: ID does not exist" containerID="331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.053863 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d"} err="failed to get container status \"331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d\": rpc error: code = NotFound desc = could not find container \"331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d\": container with ID starting with 331de34d818762fb3b501d331960abf547ec5d6319dfa65f42de063f2f6ce82d not found: ID does not exist" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.053912 4753 scope.go:117] "RemoveContainer" containerID="c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c" Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.054581 4753 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c\": container with ID starting with c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c not found: ID does not exist" containerID="c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.054651 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c"} err="failed to get container status \"c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c\": rpc error: code = NotFound desc = could not find container \"c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c\": container with ID starting with c66118d18d05a81bf20576fda36ba01bd90d14a27420d888c158eeda733dc33c not found: ID does not exist" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.054698 4753 scope.go:117] "RemoveContainer" containerID="1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.057910 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4kqm"] Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.067973 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-c4kqm"] Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.073758 4753 scope.go:117] "RemoveContainer" containerID="679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.077168 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cb4ng"] Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.082060 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cb4ng"] Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.091023 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-dgpzc"] Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.100465 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-dgpzc"] Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.108339 4753 scope.go:117] "RemoveContainer" containerID="cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.115682 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6ggrp"] Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.121199 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6ggrp"] Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.131669 4753 scope.go:117] "RemoveContainer" containerID="1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4" Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.132938 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4\": container with ID starting with 1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4 not found: ID does not exist" containerID="1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.132980 
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.132980 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4"} err="failed to get container status \"1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4\": rpc error: code = NotFound desc = could not find container \"1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4\": container with ID starting with 1a12fe20398cc5661c131c9376f28ec36f30003882c9933cc82a40021ab9b3a4 not found: ID does not exist"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.133013 4753 scope.go:117] "RemoveContainer" containerID="679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5"
Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.133645 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5\": container with ID starting with 679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5 not found: ID does not exist" containerID="679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.133738 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5"} err="failed to get container status \"679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5\": rpc error: code = NotFound desc = could not find container \"679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5\": container with ID starting with 679725a1f96effba01879455a097ec3924d1661dcb0d70f12ef8295c5221a4f5 not found: ID does not exist"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.133772 4753 scope.go:117] "RemoveContainer" containerID="cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.133853 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s87ds"]
Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.134068 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3\": container with ID starting with cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3 not found: ID does not exist" containerID="cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.134108 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3"} err="failed to get container status \"cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3\": rpc error: code = NotFound desc = could not find container \"cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3\": container with ID starting with cdd6a28ae4b4fe5b9b0fc581e624c7ca3a35a71767ab7a44fad7a21b58e7ccf3 not found: ID does not exist"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.134123 4753 scope.go:117] "RemoveContainer" containerID="1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.144463 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-s87ds"]
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.151699 4753 scope.go:117] "RemoveContainer" containerID="5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.202461 4753 scope.go:117] "RemoveContainer" containerID="2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.229050 4753 scope.go:117] "RemoveContainer" containerID="1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7"
Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.231130 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7\": container with ID starting with 1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7 not found: ID does not exist" containerID="1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.231217 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7"} err="failed to get container status \"1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7\": rpc error: code = NotFound desc = could not find container \"1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7\": container with ID starting with 1bd24ff8a3b303afe709884c1a501f9b3a4a44ed9c7bd565ee466ab3d370dee7 not found: ID does not exist"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.231261 4753 scope.go:117] "RemoveContainer" containerID="5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d"
Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.231741 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d\": container with ID starting with 5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d not found: ID does not exist" containerID="5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.231821 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d"} err="failed to get container status \"5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d\": rpc error: code = NotFound desc = could not find container \"5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d\": container with ID starting with 5e37e96162774e1c60e5b940dbeaa76fb4001dfdba6cd2719bb42cc73621a26d not found: ID does not exist"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.231888 4753 scope.go:117] "RemoveContainer" containerID="2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551"
Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.232214 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551\": container with ID starting with 2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551 not found: ID does not exist" containerID="2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.232257 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551"} err="failed to get container status \"2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551\": rpc error: code = NotFound desc = could not find container \"2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551\": container with ID starting with 2a49e811407605fc56b5bbf7867ff2858a9bc1521c3ce1c923e7542fb204d551 not found: ID does not exist"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.232281 4753 scope.go:117] "RemoveContainer" containerID="eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.259597 4753 scope.go:117] "RemoveContainer" containerID="5be7c5960ffb2fc15b84d5390bf5773ad1ba4843c0fb43e769be20a63f92d632"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.261901 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-j7hx9"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.279478 4753 scope.go:117] "RemoveContainer" containerID="eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1"
Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.280850 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1\": container with ID starting with eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1 not found: ID does not exist" containerID="eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.280891 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1"} err="failed to get container status \"eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1\": rpc error: code = NotFound desc = could not find container \"eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1\": container with ID starting with eb799446224d7545cfa481b5721a5a1747e0fdfbc7bcd537eb6ec235acfb30a1 not found: ID does not exist"
Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.280915 4753 scope.go:117] "RemoveContainer" containerID="5be7c5960ffb2fc15b84d5390bf5773ad1ba4843c0fb43e769be20a63f92d632"
Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.281791 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5be7c5960ffb2fc15b84d5390bf5773ad1ba4843c0fb43e769be20a63f92d632\": container with ID starting with 5be7c5960ffb2fc15b84d5390bf5773ad1ba4843c0fb43e769be20a63f92d632 not found: ID does not exist" containerID="5be7c5960ffb2fc15b84d5390bf5773ad1ba4843c0fb43e769be20a63f92d632"
exist" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.281895 4753 scope.go:117] "RemoveContainer" containerID="f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.301411 4753 scope.go:117] "RemoveContainer" containerID="554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.317985 4753 scope.go:117] "RemoveContainer" containerID="be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.339312 4753 scope.go:117] "RemoveContainer" containerID="f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914" Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.340709 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914\": container with ID starting with f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914 not found: ID does not exist" containerID="f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.340771 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914"} err="failed to get container status \"f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914\": rpc error: code = NotFound desc = could not find container \"f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914\": container with ID starting with f5933bb2c58a4c2827813946aed957f3d8f26119b2c0827b98048b8f4da09914 not found: ID does not exist" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.340801 4753 scope.go:117] "RemoveContainer" containerID="554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891" Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.345076 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891\": container with ID starting with 554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891 not found: ID does not exist" containerID="554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.345107 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891"} err="failed to get container status \"554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891\": rpc error: code = NotFound desc = could not find container \"554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891\": container with ID starting with 554f00266113309951d5c4824bd5c1def83c6655c5d30a5558cbf57440dd1891 not found: ID does not exist" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.345129 4753 scope.go:117] "RemoveContainer" containerID="be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c" Dec 05 17:10:31 crc kubenswrapper[4753]: E1205 17:10:31.345541 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c\": container with ID starting with be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c 
not found: ID does not exist" containerID="be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.345583 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c"} err="failed to get container status \"be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c\": rpc error: code = NotFound desc = could not find container \"be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c\": container with ID starting with be6abb93820e6c514a3cbe70abfbb8307e0ad38c3230dd947d8aa001d63a7a4c not found: ID does not exist" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.414192 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4vvd9"] Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.729616 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" path="/var/lib/kubelet/pods/03fd3f93-fb41-4e32-8276-416c40f2b9a7/volumes" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.730505 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" path="/var/lib/kubelet/pods/36b2ef01-2c7a-4313-a03a-be77a660d987/volumes" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.731075 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" path="/var/lib/kubelet/pods/5560048a-c9e9-4743-9573-1b58a2240c29/volumes" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.734101 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74502a8a-c734-4cd6-9687-2c4afbfa763e" path="/var/lib/kubelet/pods/74502a8a-c734-4cd6-9687-2c4afbfa763e/volumes" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.734861 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" path="/var/lib/kubelet/pods/db1dcb29-b751-4a77-8f2e-68efadf955b9/volumes" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.736365 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="debd2d03-1310-4ac8-8555-90c54eb307a4" path="/var/lib/kubelet/pods/debd2d03-1310-4ac8-8555-90c54eb307a4/volumes" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.737043 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" path="/var/lib/kubelet/pods/ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b/volumes" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.939353 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" event={"ID":"75da1bbe-de00-4674-a99f-0ef291146c5f","Type":"ContainerStarted","Data":"57d175ad73d1d3ef57de59da92458de37fabbe3a74a7d7ff87ba6405e421313c"} Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.940788 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.946242 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.949515 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" event={"ID":"92261e30-74b9-4603-a401-43f1c7baa54b","Type":"ContainerStarted","Data":"1449fd99f9d2118d9366b9273254ccdb51d079055ae2a28e0a965054be2dff20"} Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.951041 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.952471 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" event={"ID":"7aeabdb9-1aef-44d2-85e5-c17fcb4290be","Type":"ContainerStarted","Data":"5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c"} Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.953138 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.956729 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.957710 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:10:31 crc kubenswrapper[4753]: I1205 17:10:31.962981 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-697546d85-w8bh9" podStartSLOduration=3.962960472 podStartE2EDuration="3.962960472s" podCreationTimestamp="2025-12-05 17:10:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:10:31.9604812 +0000 UTC m=+370.463588226" watchObservedRunningTime="2025-12-05 17:10:31.962960472 +0000 UTC m=+370.466067478" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.036892 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-b88ff9559-jrwxq" podStartSLOduration=4.036874526 podStartE2EDuration="4.036874526s" podCreationTimestamp="2025-12-05 17:10:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:10:32.031743777 +0000 UTC m=+370.534850783" watchObservedRunningTime="2025-12-05 17:10:32.036874526 +0000 UTC m=+370.539981532" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.037790 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-dbnjm"] Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038073 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" containerName="extract-content" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038091 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" containerName="extract-content" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038103 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038111 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" 
containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038124 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerName="extract-content" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038131 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerName="extract-content" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038141 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerName="extract-content" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038164 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerName="extract-content" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038173 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerName="marketplace-operator" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038179 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerName="marketplace-operator" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038188 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038194 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038202 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerName="extract-content" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038211 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerName="extract-content" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038225 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" containerName="extract-utilities" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038231 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" containerName="extract-utilities" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038244 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerName="extract-utilities" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038250 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerName="extract-utilities" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038261 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerName="extract-utilities" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038268 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerName="extract-utilities" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038277 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerName="extract-utilities" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038286 4753 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerName="extract-utilities" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038298 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038305 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038315 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038321 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038437 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerName="marketplace-operator" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038450 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="36b2ef01-2c7a-4313-a03a-be77a660d987" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038464 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ee9a0d97-2cf8-48bc-b90a-59f0c4c3b76b" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038473 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="5560048a-c9e9-4743-9573-1b58a2240c29" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038481 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="db1dcb29-b751-4a77-8f2e-68efadf955b9" containerName="registry-server" Dec 05 17:10:32 crc kubenswrapper[4753]: E1205 17:10:32.038569 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerName="marketplace-operator" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038579 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerName="marketplace-operator" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.038685 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="03fd3f93-fb41-4e32-8276-416c40f2b9a7" containerName="marketplace-operator" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.039369 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.042309 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.063910 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbnjm"] Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.065241 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" podStartSLOduration=3.065213428 podStartE2EDuration="3.065213428s" podCreationTimestamp="2025-12-05 17:10:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:10:32.051995985 +0000 UTC m=+370.555102991" watchObservedRunningTime="2025-12-05 17:10:32.065213428 +0000 UTC m=+370.568320434" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.199938 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-utilities\") pod \"redhat-marketplace-dbnjm\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") " pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.200013 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ktnl\" (UniqueName: \"kubernetes.io/projected/2a7a8437-e7be-454d-bc02-71af554d390b-kube-api-access-2ktnl\") pod \"redhat-marketplace-dbnjm\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") " pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.200264 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-catalog-content\") pod \"redhat-marketplace-dbnjm\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") " pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.222783 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-s6f9l"] Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.224787 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.227385 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.243374 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s6f9l"] Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.301470 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-catalog-content\") pod \"certified-operators-s6f9l\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") " pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.301534 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-catalog-content\") pod \"redhat-marketplace-dbnjm\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") " pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.301770 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-utilities\") pod \"certified-operators-s6f9l\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") " pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.301838 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nv58c\" (UniqueName: \"kubernetes.io/projected/2a300708-258b-4ba6-b7c1-c46b90c8ec36-kube-api-access-nv58c\") pod \"certified-operators-s6f9l\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") " pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.301910 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-utilities\") pod \"redhat-marketplace-dbnjm\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") " pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.301968 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-catalog-content\") pod \"redhat-marketplace-dbnjm\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") " pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.302000 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ktnl\" (UniqueName: \"kubernetes.io/projected/2a7a8437-e7be-454d-bc02-71af554d390b-kube-api-access-2ktnl\") pod \"redhat-marketplace-dbnjm\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") " pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.302316 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-utilities\") pod \"redhat-marketplace-dbnjm\" (UID: 
\"2a7a8437-e7be-454d-bc02-71af554d390b\") " pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.341372 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ktnl\" (UniqueName: \"kubernetes.io/projected/2a7a8437-e7be-454d-bc02-71af554d390b-kube-api-access-2ktnl\") pod \"redhat-marketplace-dbnjm\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") " pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.362408 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.403551 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-utilities\") pod \"certified-operators-s6f9l\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") " pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.403626 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nv58c\" (UniqueName: \"kubernetes.io/projected/2a300708-258b-4ba6-b7c1-c46b90c8ec36-kube-api-access-nv58c\") pod \"certified-operators-s6f9l\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") " pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.403695 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-catalog-content\") pod \"certified-operators-s6f9l\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") " pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.405015 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-utilities\") pod \"certified-operators-s6f9l\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") " pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.405056 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-catalog-content\") pod \"certified-operators-s6f9l\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") " pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.438008 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nv58c\" (UniqueName: \"kubernetes.io/projected/2a300708-258b-4ba6-b7c1-c46b90c8ec36-kube-api-access-nv58c\") pod \"certified-operators-s6f9l\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") " pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.549809 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.681293 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbnjm"] Dec 05 17:10:32 crc kubenswrapper[4753]: W1205 17:10:32.690039 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a7a8437_e7be_454d_bc02_71af554d390b.slice/crio-d5b2a351a84c5947cb63e76f2d78082462ba26c8ad1c846706232dccbbe5e06e WatchSource:0}: Error finding container d5b2a351a84c5947cb63e76f2d78082462ba26c8ad1c846706232dccbbe5e06e: Status 404 returned error can't find the container with id d5b2a351a84c5947cb63e76f2d78082462ba26c8ad1c846706232dccbbe5e06e Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.965842 4753 generic.go:334] "Generic (PLEG): container finished" podID="2a7a8437-e7be-454d-bc02-71af554d390b" containerID="6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d" exitCode=0 Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.965926 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbnjm" event={"ID":"2a7a8437-e7be-454d-bc02-71af554d390b","Type":"ContainerDied","Data":"6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d"} Dec 05 17:10:32 crc kubenswrapper[4753]: I1205 17:10:32.968883 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbnjm" event={"ID":"2a7a8437-e7be-454d-bc02-71af554d390b","Type":"ContainerStarted","Data":"d5b2a351a84c5947cb63e76f2d78082462ba26c8ad1c846706232dccbbe5e06e"} Dec 05 17:10:33 crc kubenswrapper[4753]: I1205 17:10:33.017393 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-s6f9l"] Dec 05 17:10:33 crc kubenswrapper[4753]: W1205 17:10:33.029162 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a300708_258b_4ba6_b7c1_c46b90c8ec36.slice/crio-8199d3b42a5ccacb4ab68e7531591a2f4970b6e8bbe7155f401cfb63ee5749aa WatchSource:0}: Error finding container 8199d3b42a5ccacb4ab68e7531591a2f4970b6e8bbe7155f401cfb63ee5749aa: Status 404 returned error can't find the container with id 8199d3b42a5ccacb4ab68e7531591a2f4970b6e8bbe7155f401cfb63ee5749aa Dec 05 17:10:33 crc kubenswrapper[4753]: I1205 17:10:33.979484 4753 generic.go:334] "Generic (PLEG): container finished" podID="2a7a8437-e7be-454d-bc02-71af554d390b" containerID="024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02" exitCode=0 Dec 05 17:10:33 crc kubenswrapper[4753]: I1205 17:10:33.979613 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbnjm" event={"ID":"2a7a8437-e7be-454d-bc02-71af554d390b","Type":"ContainerDied","Data":"024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02"} Dec 05 17:10:33 crc kubenswrapper[4753]: I1205 17:10:33.983844 4753 generic.go:334] "Generic (PLEG): container finished" podID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerID="6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f" exitCode=0 Dec 05 17:10:33 crc kubenswrapper[4753]: I1205 17:10:33.984351 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f9l" event={"ID":"2a300708-258b-4ba6-b7c1-c46b90c8ec36","Type":"ContainerDied","Data":"6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f"} 
Dec 05 17:10:33 crc kubenswrapper[4753]: I1205 17:10:33.984409 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f9l" event={"ID":"2a300708-258b-4ba6-b7c1-c46b90c8ec36","Type":"ContainerStarted","Data":"8199d3b42a5ccacb4ab68e7531591a2f4970b6e8bbe7155f401cfb63ee5749aa"}
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.623377 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hh2kd"]
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.624907 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.627311 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.642511 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hh2kd"]
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.741698 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-catalog-content\") pod \"community-operators-hh2kd\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") " pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.742087 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-utilities\") pod \"community-operators-hh2kd\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") " pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.742303 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9h8k6\" (UniqueName: \"kubernetes.io/projected/b7552952-65cd-4686-89de-8cd96ba599f2-kube-api-access-9h8k6\") pod \"community-operators-hh2kd\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") " pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.825020 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lk7z9"]
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.826333 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lk7z9"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.828684 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.833136 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lk7z9"]
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.843603 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-catalog-content\") pod \"community-operators-hh2kd\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") " pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.843648 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-utilities\") pod \"community-operators-hh2kd\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") " pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.843686 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9h8k6\" (UniqueName: \"kubernetes.io/projected/b7552952-65cd-4686-89de-8cd96ba599f2-kube-api-access-9h8k6\") pod \"community-operators-hh2kd\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") " pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.844401 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-catalog-content\") pod \"community-operators-hh2kd\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") " pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.844715 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-utilities\") pod \"community-operators-hh2kd\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") " pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.871328 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9h8k6\" (UniqueName: \"kubernetes.io/projected/b7552952-65cd-4686-89de-8cd96ba599f2-kube-api-access-9h8k6\") pod \"community-operators-hh2kd\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") " pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.945545 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-catalog-content\") pod \"redhat-operators-lk7z9\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") " pod="openshift-marketplace/redhat-operators-lk7z9"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.945629 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68f7p\" (UniqueName: \"kubernetes.io/projected/f35bd233-6e5b-4099-8da5-2dda1519d793-kube-api-access-68f7p\") pod \"redhat-operators-lk7z9\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") " pod="openshift-marketplace/redhat-operators-lk7z9"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.945684 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-utilities\") pod \"redhat-operators-lk7z9\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") " pod="openshift-marketplace/redhat-operators-lk7z9"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.947039 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.995410 4753 generic.go:334] "Generic (PLEG): container finished" podID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerID="d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427" exitCode=0
Dec 05 17:10:34 crc kubenswrapper[4753]: I1205 17:10:34.995523 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f9l" event={"ID":"2a300708-258b-4ba6-b7c1-c46b90c8ec36","Type":"ContainerDied","Data":"d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427"}
Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.004896 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbnjm" event={"ID":"2a7a8437-e7be-454d-bc02-71af554d390b","Type":"ContainerStarted","Data":"a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f"}
Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.045322 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-dbnjm" podStartSLOduration=1.66003587 podStartE2EDuration="3.045301601s" podCreationTimestamp="2025-12-05 17:10:32 +0000 UTC" firstStartedPulling="2025-12-05 17:10:32.968112269 +0000 UTC m=+371.471219275" lastFinishedPulling="2025-12-05 17:10:34.353378 +0000 UTC m=+372.856485006" observedRunningTime="2025-12-05 17:10:35.043958052 +0000 UTC m=+373.547065068" watchObservedRunningTime="2025-12-05 17:10:35.045301601 +0000 UTC m=+373.548408607"
Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.046626 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68f7p\" (UniqueName: \"kubernetes.io/projected/f35bd233-6e5b-4099-8da5-2dda1519d793-kube-api-access-68f7p\") pod \"redhat-operators-lk7z9\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") " pod="openshift-marketplace/redhat-operators-lk7z9"
Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.046751 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-utilities\") pod \"redhat-operators-lk7z9\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") " pod="openshift-marketplace/redhat-operators-lk7z9"
Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.046824 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-catalog-content\") pod \"redhat-operators-lk7z9\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") " pod="openshift-marketplace/redhat-operators-lk7z9"
\"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-catalog-content\") pod \"redhat-operators-lk7z9\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") " pod="openshift-marketplace/redhat-operators-lk7z9" Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.047953 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-utilities\") pod \"redhat-operators-lk7z9\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") " pod="openshift-marketplace/redhat-operators-lk7z9" Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.083690 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68f7p\" (UniqueName: \"kubernetes.io/projected/f35bd233-6e5b-4099-8da5-2dda1519d793-kube-api-access-68f7p\") pod \"redhat-operators-lk7z9\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") " pod="openshift-marketplace/redhat-operators-lk7z9" Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.141681 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lk7z9" Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.442523 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hh2kd"] Dec 05 17:10:35 crc kubenswrapper[4753]: W1205 17:10:35.452398 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb7552952_65cd_4686_89de_8cd96ba599f2.slice/crio-62d8a860fe6acc3db40f4a9a3f7d1fa7bc8316198c28af546880f1fd57d9fbfe WatchSource:0}: Error finding container 62d8a860fe6acc3db40f4a9a3f7d1fa7bc8316198c28af546880f1fd57d9fbfe: Status 404 returned error can't find the container with id 62d8a860fe6acc3db40f4a9a3f7d1fa7bc8316198c28af546880f1fd57d9fbfe Dec 05 17:10:35 crc kubenswrapper[4753]: I1205 17:10:35.590963 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lk7z9"] Dec 05 17:10:36 crc kubenswrapper[4753]: I1205 17:10:36.014791 4753 generic.go:334] "Generic (PLEG): container finished" podID="b7552952-65cd-4686-89de-8cd96ba599f2" containerID="08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d" exitCode=0 Dec 05 17:10:36 crc kubenswrapper[4753]: I1205 17:10:36.014984 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hh2kd" event={"ID":"b7552952-65cd-4686-89de-8cd96ba599f2","Type":"ContainerDied","Data":"08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d"} Dec 05 17:10:36 crc kubenswrapper[4753]: I1205 17:10:36.015200 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hh2kd" event={"ID":"b7552952-65cd-4686-89de-8cd96ba599f2","Type":"ContainerStarted","Data":"62d8a860fe6acc3db40f4a9a3f7d1fa7bc8316198c28af546880f1fd57d9fbfe"} Dec 05 17:10:36 crc kubenswrapper[4753]: I1205 17:10:36.018843 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f9l" event={"ID":"2a300708-258b-4ba6-b7c1-c46b90c8ec36","Type":"ContainerStarted","Data":"2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d"} Dec 05 17:10:36 crc kubenswrapper[4753]: I1205 17:10:36.021373 4753 generic.go:334] "Generic (PLEG): container finished" podID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerID="dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58" exitCode=0 Dec 05 17:10:36 crc 
kubenswrapper[4753]: I1205 17:10:36.021442 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lk7z9" event={"ID":"f35bd233-6e5b-4099-8da5-2dda1519d793","Type":"ContainerDied","Data":"dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58"} Dec 05 17:10:36 crc kubenswrapper[4753]: I1205 17:10:36.021478 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lk7z9" event={"ID":"f35bd233-6e5b-4099-8da5-2dda1519d793","Type":"ContainerStarted","Data":"94b5329128ddaa36e1822648a2e8263f095c06559bd129082949d518ba28c002"} Dec 05 17:10:36 crc kubenswrapper[4753]: I1205 17:10:36.097103 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-s6f9l" podStartSLOduration=2.694697582 podStartE2EDuration="4.097084291s" podCreationTimestamp="2025-12-05 17:10:32 +0000 UTC" firstStartedPulling="2025-12-05 17:10:33.98757379 +0000 UTC m=+372.490680796" lastFinishedPulling="2025-12-05 17:10:35.389960499 +0000 UTC m=+373.893067505" observedRunningTime="2025-12-05 17:10:36.092882239 +0000 UTC m=+374.595989265" watchObservedRunningTime="2025-12-05 17:10:36.097084291 +0000 UTC m=+374.600191297" Dec 05 17:10:37 crc kubenswrapper[4753]: I1205 17:10:37.029640 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hh2kd" event={"ID":"b7552952-65cd-4686-89de-8cd96ba599f2","Type":"ContainerStarted","Data":"f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f"} Dec 05 17:10:37 crc kubenswrapper[4753]: I1205 17:10:37.032760 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lk7z9" event={"ID":"f35bd233-6e5b-4099-8da5-2dda1519d793","Type":"ContainerStarted","Data":"ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf"} Dec 05 17:10:38 crc kubenswrapper[4753]: I1205 17:10:38.041653 4753 generic.go:334] "Generic (PLEG): container finished" podID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerID="ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf" exitCode=0 Dec 05 17:10:38 crc kubenswrapper[4753]: I1205 17:10:38.041748 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lk7z9" event={"ID":"f35bd233-6e5b-4099-8da5-2dda1519d793","Type":"ContainerDied","Data":"ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf"} Dec 05 17:10:38 crc kubenswrapper[4753]: I1205 17:10:38.047320 4753 generic.go:334] "Generic (PLEG): container finished" podID="b7552952-65cd-4686-89de-8cd96ba599f2" containerID="f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f" exitCode=0 Dec 05 17:10:38 crc kubenswrapper[4753]: I1205 17:10:38.047567 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hh2kd" event={"ID":"b7552952-65cd-4686-89de-8cd96ba599f2","Type":"ContainerDied","Data":"f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f"} Dec 05 17:10:39 crc kubenswrapper[4753]: I1205 17:10:39.060364 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lk7z9" event={"ID":"f35bd233-6e5b-4099-8da5-2dda1519d793","Type":"ContainerStarted","Data":"3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d"} Dec 05 17:10:39 crc kubenswrapper[4753]: I1205 17:10:39.064569 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hh2kd" 
event={"ID":"b7552952-65cd-4686-89de-8cd96ba599f2","Type":"ContainerStarted","Data":"22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a"} Dec 05 17:10:39 crc kubenswrapper[4753]: I1205 17:10:39.086635 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lk7z9" podStartSLOduration=2.686569841 podStartE2EDuration="5.086603278s" podCreationTimestamp="2025-12-05 17:10:34 +0000 UTC" firstStartedPulling="2025-12-05 17:10:36.023316041 +0000 UTC m=+374.526423047" lastFinishedPulling="2025-12-05 17:10:38.423349438 +0000 UTC m=+376.926456484" observedRunningTime="2025-12-05 17:10:39.08150201 +0000 UTC m=+377.584609036" watchObservedRunningTime="2025-12-05 17:10:39.086603278 +0000 UTC m=+377.589710284" Dec 05 17:10:39 crc kubenswrapper[4753]: I1205 17:10:39.113066 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hh2kd" podStartSLOduration=2.6467321249999998 podStartE2EDuration="5.113044475s" podCreationTimestamp="2025-12-05 17:10:34 +0000 UTC" firstStartedPulling="2025-12-05 17:10:36.017018818 +0000 UTC m=+374.520125824" lastFinishedPulling="2025-12-05 17:10:38.483331158 +0000 UTC m=+376.986438174" observedRunningTime="2025-12-05 17:10:39.107569086 +0000 UTC m=+377.610676092" watchObservedRunningTime="2025-12-05 17:10:39.113044475 +0000 UTC m=+377.616151481" Dec 05 17:10:42 crc kubenswrapper[4753]: I1205 17:10:42.363595 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:42 crc kubenswrapper[4753]: I1205 17:10:42.365172 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:42 crc kubenswrapper[4753]: I1205 17:10:42.415896 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:42 crc kubenswrapper[4753]: I1205 17:10:42.551111 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:42 crc kubenswrapper[4753]: I1205 17:10:42.551233 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:42 crc kubenswrapper[4753]: I1205 17:10:42.595900 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:43 crc kubenswrapper[4753]: I1205 17:10:43.147876 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:10:43 crc kubenswrapper[4753]: I1205 17:10:43.151343 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-dbnjm" Dec 05 17:10:44 crc kubenswrapper[4753]: I1205 17:10:44.948350 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hh2kd" Dec 05 17:10:44 crc kubenswrapper[4753]: I1205 17:10:44.949584 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hh2kd" Dec 05 17:10:45 crc kubenswrapper[4753]: I1205 17:10:45.039376 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hh2kd" Dec 05 17:10:45 crc kubenswrapper[4753]: I1205 
17:10:45.142968 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lk7z9" Dec 05 17:10:45 crc kubenswrapper[4753]: I1205 17:10:45.143381 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lk7z9" Dec 05 17:10:45 crc kubenswrapper[4753]: I1205 17:10:45.161013 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hh2kd" Dec 05 17:10:45 crc kubenswrapper[4753]: I1205 17:10:45.193079 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lk7z9" Dec 05 17:10:46 crc kubenswrapper[4753]: I1205 17:10:46.166637 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lk7z9" Dec 05 17:10:56 crc kubenswrapper[4753]: I1205 17:10:56.470326 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" podUID="abca9c47-e52b-4410-83e1-b486f8f01aca" containerName="registry" containerID="cri-o://b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57" gracePeriod=30 Dec 05 17:10:56 crc kubenswrapper[4753]: I1205 17:10:56.975698 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.054339 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-bound-sa-token\") pod \"abca9c47-e52b-4410-83e1-b486f8f01aca\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.054762 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"abca9c47-e52b-4410-83e1-b486f8f01aca\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.054842 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-tls\") pod \"abca9c47-e52b-4410-83e1-b486f8f01aca\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.054903 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-certificates\") pod \"abca9c47-e52b-4410-83e1-b486f8f01aca\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.054962 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/abca9c47-e52b-4410-83e1-b486f8f01aca-ca-trust-extracted\") pod \"abca9c47-e52b-4410-83e1-b486f8f01aca\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.054995 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s92jj\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-kube-api-access-s92jj\") 
pod \"abca9c47-e52b-4410-83e1-b486f8f01aca\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.055063 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/abca9c47-e52b-4410-83e1-b486f8f01aca-installation-pull-secrets\") pod \"abca9c47-e52b-4410-83e1-b486f8f01aca\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.055177 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-trusted-ca\") pod \"abca9c47-e52b-4410-83e1-b486f8f01aca\" (UID: \"abca9c47-e52b-4410-83e1-b486f8f01aca\") " Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.056139 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "abca9c47-e52b-4410-83e1-b486f8f01aca" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.056312 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "abca9c47-e52b-4410-83e1-b486f8f01aca" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.062647 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-kube-api-access-s92jj" (OuterVolumeSpecName: "kube-api-access-s92jj") pod "abca9c47-e52b-4410-83e1-b486f8f01aca" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca"). InnerVolumeSpecName "kube-api-access-s92jj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.063136 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "abca9c47-e52b-4410-83e1-b486f8f01aca" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.064839 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "abca9c47-e52b-4410-83e1-b486f8f01aca" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.076623 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/abca9c47-e52b-4410-83e1-b486f8f01aca-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "abca9c47-e52b-4410-83e1-b486f8f01aca" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.084539 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/abca9c47-e52b-4410-83e1-b486f8f01aca-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "abca9c47-e52b-4410-83e1-b486f8f01aca" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.091882 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "abca9c47-e52b-4410-83e1-b486f8f01aca" (UID: "abca9c47-e52b-4410-83e1-b486f8f01aca"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.156927 4753 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.156997 4753 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.157015 4753 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/abca9c47-e52b-4410-83e1-b486f8f01aca-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.157025 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s92jj\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-kube-api-access-s92jj\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.157036 4753 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/abca9c47-e52b-4410-83e1-b486f8f01aca-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.157046 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/abca9c47-e52b-4410-83e1-b486f8f01aca-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.157057 4753 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/abca9c47-e52b-4410-83e1-b486f8f01aca-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.196169 4753 generic.go:334] "Generic (PLEG): container finished" podID="abca9c47-e52b-4410-83e1-b486f8f01aca" containerID="b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57" exitCode=0 Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.196277 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.196255 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" event={"ID":"abca9c47-e52b-4410-83e1-b486f8f01aca","Type":"ContainerDied","Data":"b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57"} Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.196430 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4vvd9" event={"ID":"abca9c47-e52b-4410-83e1-b486f8f01aca","Type":"ContainerDied","Data":"3809342bf67fd621fbeb0c0a92c72f0904ee3d0994b143784f641de8ccbd319f"} Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.196464 4753 scope.go:117] "RemoveContainer" containerID="b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.224358 4753 scope.go:117] "RemoveContainer" containerID="b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57" Dec 05 17:10:57 crc kubenswrapper[4753]: E1205 17:10:57.225015 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57\": container with ID starting with b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57 not found: ID does not exist" containerID="b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.225135 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57"} err="failed to get container status \"b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57\": rpc error: code = NotFound desc = could not find container \"b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57\": container with ID starting with b17c4d232ba927af4baa91d985bf94d32f9e39a0b5b54283b901285c941dcf57 not found: ID does not exist" Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.238889 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4vvd9"] Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.243410 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4vvd9"] Dec 05 17:10:57 crc kubenswrapper[4753]: I1205 17:10:57.727793 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="abca9c47-e52b-4410-83e1-b486f8f01aca" path="/var/lib/kubelet/pods/abca9c47-e52b-4410-83e1-b486f8f01aca/volumes" Dec 05 17:10:58 crc kubenswrapper[4753]: I1205 17:10:58.979753 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:10:58 crc kubenswrapper[4753]: I1205 17:10:58.981209 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:11:21 crc 
kubenswrapper[4753]: I1205 17:11:21.864698 4753 scope.go:117] "RemoveContainer" containerID="20eb41ef00b7711fdfef0e4f3bd83b2fbdb193be96cafc0ef87f048271e7dd78" Dec 05 17:11:21 crc kubenswrapper[4753]: I1205 17:11:21.892369 4753 scope.go:117] "RemoveContainer" containerID="0585e546af65a61265267b97083556b811ad35ee6c48fa84262b3f4f25eaf907" Dec 05 17:11:21 crc kubenswrapper[4753]: I1205 17:11:21.908505 4753 scope.go:117] "RemoveContainer" containerID="d3a5f37b26bfe7933088e08379662250e7b188efbbb5784d49e8d86262e10416" Dec 05 17:11:21 crc kubenswrapper[4753]: I1205 17:11:21.925050 4753 scope.go:117] "RemoveContainer" containerID="195b8bf18e6cf937def214e377ec9def799c19ea4d5f008b66cc42048656ffce" Dec 05 17:11:21 crc kubenswrapper[4753]: I1205 17:11:21.944042 4753 scope.go:117] "RemoveContainer" containerID="559d475cfb259b570dcc941f5be394d24687136a22573b8f20cc91ce690e8ffd" Dec 05 17:11:21 crc kubenswrapper[4753]: I1205 17:11:21.970021 4753 scope.go:117] "RemoveContainer" containerID="c0061bf86592b301a0a9398d2938e614f76616ed7ddcf310aa50c961632fb58b" Dec 05 17:11:28 crc kubenswrapper[4753]: I1205 17:11:28.979654 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:11:28 crc kubenswrapper[4753]: I1205 17:11:28.980100 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:11:28 crc kubenswrapper[4753]: I1205 17:11:28.980201 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:11:28 crc kubenswrapper[4753]: I1205 17:11:28.980861 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b75942e9b07cd0facb3635e22b2009b01e5b0d5b508c9825ab0bd49c3ba268e9"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:11:28 crc kubenswrapper[4753]: I1205 17:11:28.980927 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://b75942e9b07cd0facb3635e22b2009b01e5b0d5b508c9825ab0bd49c3ba268e9" gracePeriod=600 Dec 05 17:11:29 crc kubenswrapper[4753]: I1205 17:11:29.460281 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="b75942e9b07cd0facb3635e22b2009b01e5b0d5b508c9825ab0bd49c3ba268e9" exitCode=0 Dec 05 17:11:29 crc kubenswrapper[4753]: I1205 17:11:29.460417 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"b75942e9b07cd0facb3635e22b2009b01e5b0d5b508c9825ab0bd49c3ba268e9"} Dec 05 17:11:29 crc kubenswrapper[4753]: I1205 17:11:29.460756 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"048cd1f4edd965d0ae4cd5cc208b03c56a5a2a1c72e0809d8cf01232add1c614"} Dec 05 17:11:29 crc kubenswrapper[4753]: I1205 17:11:29.460788 4753 scope.go:117] "RemoveContainer" containerID="60c5d0ca3d26171050d592c3d76f0db4ca1a12344c40825a8e7c5fd579814c80" Dec 05 17:13:58 crc kubenswrapper[4753]: I1205 17:13:58.979096 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:13:58 crc kubenswrapper[4753]: I1205 17:13:58.980361 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:14:28 crc kubenswrapper[4753]: I1205 17:14:28.979896 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:14:28 crc kubenswrapper[4753]: I1205 17:14:28.980802 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:14:58 crc kubenswrapper[4753]: I1205 17:14:58.979469 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:14:58 crc kubenswrapper[4753]: I1205 17:14:58.980542 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:14:58 crc kubenswrapper[4753]: I1205 17:14:58.980704 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:14:58 crc kubenswrapper[4753]: I1205 17:14:58.982071 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"048cd1f4edd965d0ae4cd5cc208b03c56a5a2a1c72e0809d8cf01232add1c614"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:14:58 crc kubenswrapper[4753]: I1205 17:14:58.982239 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" 
containerName="machine-config-daemon" containerID="cri-o://048cd1f4edd965d0ae4cd5cc208b03c56a5a2a1c72e0809d8cf01232add1c614" gracePeriod=600 Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.078520 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="048cd1f4edd965d0ae4cd5cc208b03c56a5a2a1c72e0809d8cf01232add1c614" exitCode=0 Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.078607 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"048cd1f4edd965d0ae4cd5cc208b03c56a5a2a1c72e0809d8cf01232add1c614"} Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.079266 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"bba70de976ea341407c3eb9e1abd5171c31bb8762b60796e6f36abd1b897024d"} Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.079300 4753 scope.go:117] "RemoveContainer" containerID="b75942e9b07cd0facb3635e22b2009b01e5b0d5b508c9825ab0bd49c3ba268e9" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.177979 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn"] Dec 05 17:15:00 crc kubenswrapper[4753]: E1205 17:15:00.178316 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="abca9c47-e52b-4410-83e1-b486f8f01aca" containerName="registry" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.178338 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="abca9c47-e52b-4410-83e1-b486f8f01aca" containerName="registry" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.178502 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="abca9c47-e52b-4410-83e1-b486f8f01aca" containerName="registry" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.179112 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.182173 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.182260 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.191334 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn"] Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.286484 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-config-volume\") pod \"collect-profiles-29415915-698dn\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.286577 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6zhz\" (UniqueName: \"kubernetes.io/projected/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-kube-api-access-t6zhz\") pod \"collect-profiles-29415915-698dn\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.286598 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-secret-volume\") pod \"collect-profiles-29415915-698dn\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.388208 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6zhz\" (UniqueName: \"kubernetes.io/projected/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-kube-api-access-t6zhz\") pod \"collect-profiles-29415915-698dn\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.388276 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-secret-volume\") pod \"collect-profiles-29415915-698dn\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.388352 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-config-volume\") pod \"collect-profiles-29415915-698dn\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.389447 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-config-volume\") pod 
\"collect-profiles-29415915-698dn\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.408672 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-secret-volume\") pod \"collect-profiles-29415915-698dn\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.411073 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6zhz\" (UniqueName: \"kubernetes.io/projected/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-kube-api-access-t6zhz\") pod \"collect-profiles-29415915-698dn\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.498011 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:00 crc kubenswrapper[4753]: I1205 17:15:00.738898 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn"] Dec 05 17:15:01 crc kubenswrapper[4753]: I1205 17:15:01.091295 4753 generic.go:334] "Generic (PLEG): container finished" podID="8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24" containerID="3d51108761e9c69403371d5b0a4bd55ebe069ce1a1d6399b50b88c15896ab41f" exitCode=0 Dec 05 17:15:01 crc kubenswrapper[4753]: I1205 17:15:01.091543 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" event={"ID":"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24","Type":"ContainerDied","Data":"3d51108761e9c69403371d5b0a4bd55ebe069ce1a1d6399b50b88c15896ab41f"} Dec 05 17:15:01 crc kubenswrapper[4753]: I1205 17:15:01.091771 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" event={"ID":"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24","Type":"ContainerStarted","Data":"bc3c24bb54e5b16a8a819e0b2742b633117e9da87accc5ed49e5fedfb1ee0cf5"} Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.317415 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.321959 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t6zhz\" (UniqueName: \"kubernetes.io/projected/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-kube-api-access-t6zhz\") pod \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.322041 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-secret-volume\") pod \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.322087 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-config-volume\") pod \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\" (UID: \"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24\") " Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.323219 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-config-volume" (OuterVolumeSpecName: "config-volume") pod "8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24" (UID: "8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.328589 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-kube-api-access-t6zhz" (OuterVolumeSpecName: "kube-api-access-t6zhz") pod "8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24" (UID: "8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24"). InnerVolumeSpecName "kube-api-access-t6zhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.330202 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24" (UID: "8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.423791 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t6zhz\" (UniqueName: \"kubernetes.io/projected/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-kube-api-access-t6zhz\") on node \"crc\" DevicePath \"\"" Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.423874 4753 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:15:02 crc kubenswrapper[4753]: I1205 17:15:02.423885 4753 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:15:03 crc kubenswrapper[4753]: I1205 17:15:03.115336 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" event={"ID":"8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24","Type":"ContainerDied","Data":"bc3c24bb54e5b16a8a819e0b2742b633117e9da87accc5ed49e5fedfb1ee0cf5"} Dec 05 17:15:03 crc kubenswrapper[4753]: I1205 17:15:03.115949 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc3c24bb54e5b16a8a819e0b2742b633117e9da87accc5ed49e5fedfb1ee0cf5" Dec 05 17:15:03 crc kubenswrapper[4753]: I1205 17:15:03.115655 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn" Dec 05 17:15:52 crc kubenswrapper[4753]: I1205 17:15:52.866507 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h"] Dec 05 17:15:52 crc kubenswrapper[4753]: E1205 17:15:52.867692 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24" containerName="collect-profiles" Dec 05 17:15:52 crc kubenswrapper[4753]: I1205 17:15:52.867711 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24" containerName="collect-profiles" Dec 05 17:15:52 crc kubenswrapper[4753]: I1205 17:15:52.867859 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24" containerName="collect-profiles" Dec 05 17:15:52 crc kubenswrapper[4753]: I1205 17:15:52.868905 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:52 crc kubenswrapper[4753]: I1205 17:15:52.871980 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 17:15:52 crc kubenswrapper[4753]: I1205 17:15:52.880081 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h"] Dec 05 17:15:52 crc kubenswrapper[4753]: I1205 17:15:52.950854 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:52 crc kubenswrapper[4753]: I1205 17:15:52.950940 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8nfd\" (UniqueName: \"kubernetes.io/projected/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-kube-api-access-s8nfd\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:52 crc kubenswrapper[4753]: I1205 17:15:52.950974 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:53 crc kubenswrapper[4753]: I1205 17:15:53.052065 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:53 crc kubenswrapper[4753]: I1205 17:15:53.052158 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8nfd\" (UniqueName: \"kubernetes.io/projected/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-kube-api-access-s8nfd\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:53 crc kubenswrapper[4753]: I1205 17:15:53.052193 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:53 crc kubenswrapper[4753]: I1205 17:15:53.053236 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-util\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:53 crc kubenswrapper[4753]: I1205 17:15:53.053278 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-bundle\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:53 crc kubenswrapper[4753]: I1205 17:15:53.078465 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8nfd\" (UniqueName: \"kubernetes.io/projected/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-kube-api-access-s8nfd\") pod \"6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:53 crc kubenswrapper[4753]: I1205 17:15:53.188212 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:53 crc kubenswrapper[4753]: I1205 17:15:53.398443 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h"] Dec 05 17:15:53 crc kubenswrapper[4753]: I1205 17:15:53.503329 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" event={"ID":"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c","Type":"ContainerStarted","Data":"15055a2bf7bea41271fa33071bef7bd260bffb2b0409d4b8185afe1f50efcd01"} Dec 05 17:15:54 crc kubenswrapper[4753]: I1205 17:15:54.514721 4753 generic.go:334] "Generic (PLEG): container finished" podID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerID="89d617488881ea551da8d5f7b1b4de779fae8d85edebd057f128f25d066bfd46" exitCode=0 Dec 05 17:15:54 crc kubenswrapper[4753]: I1205 17:15:54.514788 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" event={"ID":"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c","Type":"ContainerDied","Data":"89d617488881ea551da8d5f7b1b4de779fae8d85edebd057f128f25d066bfd46"} Dec 05 17:15:54 crc kubenswrapper[4753]: I1205 17:15:54.517256 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:15:56 crc kubenswrapper[4753]: I1205 17:15:56.532129 4753 generic.go:334] "Generic (PLEG): container finished" podID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerID="fa2cf6674356bbcb1673ccaf20c6b9a617d3bbe5d494397fbe44db924b7e09b3" exitCode=0 Dec 05 17:15:56 crc kubenswrapper[4753]: I1205 17:15:56.532199 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" event={"ID":"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c","Type":"ContainerDied","Data":"fa2cf6674356bbcb1673ccaf20c6b9a617d3bbe5d494397fbe44db924b7e09b3"} Dec 05 17:15:57 crc kubenswrapper[4753]: I1205 17:15:57.545560 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" event={"ID":"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c","Type":"ContainerDied","Data":"77aa4db91895a2e7731a54a5e02f0fc9756c6fb22930186082e8b1366705dcad"} Dec 05 17:15:57 crc kubenswrapper[4753]: I1205 17:15:57.545467 4753 generic.go:334] "Generic (PLEG): container finished" podID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerID="77aa4db91895a2e7731a54a5e02f0fc9756c6fb22930186082e8b1366705dcad" exitCode=0 Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.812601 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.852589 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-util\") pod \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.852650 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-bundle\") pod \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.852681 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8nfd\" (UniqueName: \"kubernetes.io/projected/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-kube-api-access-s8nfd\") pod \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\" (UID: \"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c\") " Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.855376 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-bundle" (OuterVolumeSpecName: "bundle") pod "f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" (UID: "f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.863390 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-kube-api-access-s8nfd" (OuterVolumeSpecName: "kube-api-access-s8nfd") pod "f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" (UID: "f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c"). InnerVolumeSpecName "kube-api-access-s8nfd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.911290 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-util" (OuterVolumeSpecName: "util") pod "f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" (UID: "f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.954439 4753 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-util\") on node \"crc\" DevicePath \"\"" Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.954491 4753 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:15:58 crc kubenswrapper[4753]: I1205 17:15:58.954505 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8nfd\" (UniqueName: \"kubernetes.io/projected/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c-kube-api-access-s8nfd\") on node \"crc\" DevicePath \"\"" Dec 05 17:15:59 crc kubenswrapper[4753]: I1205 17:15:59.560518 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" event={"ID":"f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c","Type":"ContainerDied","Data":"15055a2bf7bea41271fa33071bef7bd260bffb2b0409d4b8185afe1f50efcd01"} Dec 05 17:15:59 crc kubenswrapper[4753]: I1205 17:15:59.560566 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15055a2bf7bea41271fa33071bef7bd260bffb2b0409d4b8185afe1f50efcd01" Dec 05 17:15:59 crc kubenswrapper[4753]: I1205 17:15:59.560638 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.045992 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-98fvv"] Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.047969 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovn-controller" containerID="cri-o://9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300" gracePeriod=30 Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.048043 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89" gracePeriod=30 Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.048037 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="nbdb" containerID="cri-o://8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9" gracePeriod=30 Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.048227 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovn-acl-logging" containerID="cri-o://20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49" gracePeriod=30 Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.048206 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" 
containerName="kube-rbac-proxy-node" containerID="cri-o://6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2" gracePeriod=30 Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.048239 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="northd" containerID="cri-o://e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec" gracePeriod=30 Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.048322 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="sbdb" containerID="cri-o://899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2" gracePeriod=30 Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.086101 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.089407 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.090696 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller" containerID="cri-o://a1c3ee586bdec79f6c186af02f1a6c42318a899455efe849a1d5d1f61c9274d0" gracePeriod=30 Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.091592 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.091959 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9" cmd=["/bin/bash","-c","set -xeo pipefail\n. 
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.092005 4753 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="nbdb"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.100599 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"]
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.102418 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"]
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.102465 4753 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="sbdb"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.596459 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/2.log"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.597277 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/1.log"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.597333 4753 generic.go:334] "Generic (PLEG): container finished" podID="6b3d3501-4f16-4375-adf2-fd54b1cd13cf" containerID="eedfb2d21b98fca57c1ceac0b8e177906400b69a7b79c834b4a5059d01f98efe" exitCode=2
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.597426 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpl8r" event={"ID":"6b3d3501-4f16-4375-adf2-fd54b1cd13cf","Type":"ContainerDied","Data":"eedfb2d21b98fca57c1ceac0b8e177906400b69a7b79c834b4a5059d01f98efe"}
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.597489 4753 scope.go:117] "RemoveContainer" containerID="1096ebd14458819c0dbe4f5a86f2812eb19236d7d5062bfe2eda62ecdc05c55e"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.598615 4753 scope.go:117] "RemoveContainer" containerID="eedfb2d21b98fca57c1ceac0b8e177906400b69a7b79c834b4a5059d01f98efe"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.599175 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-hpl8r_openshift-multus(6b3d3501-4f16-4375-adf2-fd54b1cd13cf)\"" pod="openshift-multus/multus-hpl8r" podUID="6b3d3501-4f16-4375-adf2-fd54b1cd13cf"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.603479 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovnkube-controller/3.log"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.608398 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovn-acl-logging/0.log"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.608982 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovn-controller/0.log"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609491 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="a1c3ee586bdec79f6c186af02f1a6c42318a899455efe849a1d5d1f61c9274d0" exitCode=0
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609517 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2" exitCode=0
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609526 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9" exitCode=0
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609534 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec" exitCode=0
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609545 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89" exitCode=0
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609553 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2" exitCode=0
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609561 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49" exitCode=143
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609570 4753 generic.go:334] "Generic (PLEG): container finished" podID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerID="9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300" exitCode=143
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609593 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"a1c3ee586bdec79f6c186af02f1a6c42318a899455efe849a1d5d1f61c9274d0"}
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609623 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2"}
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609636 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9"}
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609645 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec"}
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609654 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89"}
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609668 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2"}
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609679 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49"}
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.609692 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300"}
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.666307 4753 scope.go:117] "RemoveContainer" containerID="7a20782d19103ad754e8771413e6ca949a706a6876c2e949437969541c112d24"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.752336 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovn-acl-logging/0.log"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.753092 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovn-controller/0.log"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.754585 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.833706 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-68f4b"]
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834012 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="kube-rbac-proxy-node"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834031 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="kube-rbac-proxy-node"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834048 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834056 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834068 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerName="util"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834075 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerName="util"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834083 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834089 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834098 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerName="extract"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834104 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerName="extract"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834110 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834115 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834125 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="nbdb"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834131 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="nbdb"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834240 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="kube-rbac-proxy-ovn-metrics"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834250 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="kube-rbac-proxy-ovn-metrics"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834261 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="northd"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834267 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="northd"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834274 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834280 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834291 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="kubecfg-setup"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834297 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="kubecfg-setup"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834308 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="sbdb"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834314 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="sbdb"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834323 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovn-acl-logging"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834329 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovn-acl-logging"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834338 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovn-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834344 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovn-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834352 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerName="pull"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834357 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerName="pull"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834452 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834461 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovn-acl-logging"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834469 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="northd"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834476 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="nbdb"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834485 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="sbdb"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834496 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" containerName="extract"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834505 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="kube-rbac-proxy-ovn-metrics"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834514 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834522 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834529 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovn-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834538 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="kube-rbac-proxy-node"
Dec 05 17:16:04 crc kubenswrapper[4753]: E1205 17:16:04.834631 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834639 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834732 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.834923 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" containerName="ovnkube-controller"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.836529 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.856728 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-openvswitch\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.856766 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-systemd-units\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.856814 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-netd\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.856840 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-log-socket\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.856881 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-etc-openvswitch\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.856874 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.856917 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.856981 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857025 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-ovn\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857005 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-log-socket" (OuterVolumeSpecName: "log-socket") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857035 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857091 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-slash\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857100 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-slash" (OuterVolumeSpecName: "host-slash") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857199 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857191 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857235 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-var-lib-cni-networks-ovn-kubernetes\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857281 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fgl2\" (UniqueName: \"kubernetes.io/projected/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-kube-api-access-2fgl2\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857308 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-ovn-kubernetes\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857354 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovn-node-metrics-cert\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857398 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857413 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-systemd\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857470 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-netns\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857549 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857954 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.857497 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-env-overrides\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.858468 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-node-log\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.858506 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-node-log" (OuterVolumeSpecName: "node-log") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.858526 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-config\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.858550 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-var-lib-openvswitch\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.858628 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.858831 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.858936 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.858973 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-bin\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859002 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-script-lib\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859027 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-kubelet\") pod \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\" (UID: \"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a\") "
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859221 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-run-netns\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859392 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-kubelet\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b"
Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859410 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859422 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-run-openvswitch\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859444 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859466 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859473 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-run-systemd\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859570 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-systemd-units\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859608 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-run-ovn-kubernetes\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859636 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-etc-openvswitch\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859657 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/23d2265d-6549-4367-ac82-1867a8497b40-ovnkube-config\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859678 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" 
(UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-node-log\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859698 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/23d2265d-6549-4367-ac82-1867a8497b40-env-overrides\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859716 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-run-ovn\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859735 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-log-socket\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859772 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/23d2265d-6549-4367-ac82-1867a8497b40-ovnkube-script-lib\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859789 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-cni-bin\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859813 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-cni-netd\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859831 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-slash\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859854 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4c6dp\" (UniqueName: \"kubernetes.io/projected/23d2265d-6549-4367-ac82-1867a8497b40-kube-api-access-4c6dp\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859876 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/23d2265d-6549-4367-ac82-1867a8497b40-ovn-node-metrics-cert\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859901 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-var-lib-openvswitch\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859967 4753 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859978 4753 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.859990 4753 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860001 4753 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860012 4753 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860022 4753 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860033 4753 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860285 4753 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860330 4753 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860346 4753 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860396 4753 reconciler_common.go:293] "Volume 
detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860423 4753 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860438 4753 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860448 4753 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860458 4753 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860473 4753 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.860485 4753 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.865468 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.866977 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-kube-api-access-2fgl2" (OuterVolumeSpecName: "kube-api-access-2fgl2") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "kube-api-access-2fgl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.878777 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" (UID: "f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.961829 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-cni-bin\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.961912 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-cni-netd\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.961942 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-slash\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.961975 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4c6dp\" (UniqueName: \"kubernetes.io/projected/23d2265d-6549-4367-ac82-1867a8497b40-kube-api-access-4c6dp\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962001 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/23d2265d-6549-4367-ac82-1867a8497b40-ovn-node-metrics-cert\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.961989 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-cni-bin\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962104 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-slash\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962105 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-var-lib-openvswitch\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962180 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-var-lib-openvswitch\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 
17:16:04.962219 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-run-netns\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962284 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-kubelet\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962307 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-run-openvswitch\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962324 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-run-netns\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962364 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-kubelet\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962367 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962330 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962414 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-run-openvswitch\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962430 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-run-systemd\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962457 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-run-systemd\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962475 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-systemd-units\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962517 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-run-ovn-kubernetes\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962549 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-etc-openvswitch\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962581 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/23d2265d-6549-4367-ac82-1867a8497b40-ovnkube-config\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962606 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-node-log\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962635 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/23d2265d-6549-4367-ac82-1867a8497b40-env-overrides\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962661 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-run-ovn\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962676 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-etc-openvswitch\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962682 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-log-socket\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962708 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-log-socket\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962736 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-systemd-units\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962742 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/23d2265d-6549-4367-ac82-1867a8497b40-ovnkube-script-lib\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962797 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fgl2\" (UniqueName: \"kubernetes.io/projected/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-kube-api-access-2fgl2\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962811 4753 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962825 4753 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962855 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-run-ovn-kubernetes\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.962887 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-node-log\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.963224 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-run-ovn\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.963580 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/23d2265d-6549-4367-ac82-1867a8497b40-ovnkube-config\") pod 
\"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.963584 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/23d2265d-6549-4367-ac82-1867a8497b40-ovnkube-script-lib\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.963637 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/23d2265d-6549-4367-ac82-1867a8497b40-env-overrides\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.963647 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/23d2265d-6549-4367-ac82-1867a8497b40-host-cni-netd\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.968403 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/23d2265d-6549-4367-ac82-1867a8497b40-ovn-node-metrics-cert\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:04 crc kubenswrapper[4753]: I1205 17:16:04.987924 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4c6dp\" (UniqueName: \"kubernetes.io/projected/23d2265d-6549-4367-ac82-1867a8497b40-kube-api-access-4c6dp\") pod \"ovnkube-node-68f4b\" (UID: \"23d2265d-6549-4367-ac82-1867a8497b40\") " pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.154292 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.620820 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovn-acl-logging/0.log" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.621492 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-98fvv_f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/ovn-controller/0.log" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.622325 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.622294 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-98fvv" event={"ID":"f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a","Type":"ContainerDied","Data":"add9b9ff7a0d70abfbdaffb357f729ced8eb94fde0897438462c649c2503ed16"} Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.623750 4753 scope.go:117] "RemoveContainer" containerID="a1c3ee586bdec79f6c186af02f1a6c42318a899455efe849a1d5d1f61c9274d0" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.625860 4753 generic.go:334] "Generic (PLEG): container finished" podID="23d2265d-6549-4367-ac82-1867a8497b40" containerID="3dd5b48d093c34d47a48080f8ef5dcdaf6b0039d6091b29c42a586916d96a0ec" exitCode=0 Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.625940 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerDied","Data":"3dd5b48d093c34d47a48080f8ef5dcdaf6b0039d6091b29c42a586916d96a0ec"} Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.625973 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerStarted","Data":"f246a0b8604d27fc2a0efaf535897734c498f0cd57b5a0b708136c20d051cdba"} Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.630299 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/2.log" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.648203 4753 scope.go:117] "RemoveContainer" containerID="899db0b5d45776b0560a2908ce63ec8ea2b458e2e492189737e930688b4ed2e2" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.695188 4753 scope.go:117] "RemoveContainer" containerID="8d0273d889da0bd6ea3db8040da2eab62fc750f308cb9a62dee71b6c9f2fadd9" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.707696 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-98fvv"] Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.731222 4753 scope.go:117] "RemoveContainer" containerID="e79e99eb44ff95c2f9929326313e41629e6e03ebc068537bd27bc4c89ad5b0ec" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.732792 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-98fvv"] Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.767363 4753 scope.go:117] "RemoveContainer" containerID="0cb72322d436f10b006a4bb7a91b255451aba90e86100fdf249be6443159bd89" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.839486 4753 scope.go:117] "RemoveContainer" containerID="6f6585b3b62fc4a78f89b3413326d6d5259cb8c338936f2d5def6185d81d46f2" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.875450 4753 scope.go:117] "RemoveContainer" containerID="20bcab2c2912120c819bacdf478db82a78500908125e6efadaef1f3409eb0d49" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.928972 4753 scope.go:117] "RemoveContainer" containerID="9ead15f2a3179f03d770beadcaa1374561d5aebb4874615a334edccff0c2f300" Dec 05 17:16:05 crc kubenswrapper[4753]: I1205 17:16:05.970420 4753 scope.go:117] "RemoveContainer" containerID="073ffcef7324b498f2cb47680b64bf44e474a7732a5c2b34661da22c3e0f6c17" Dec 05 17:16:06 crc kubenswrapper[4753]: I1205 17:16:06.663131 4753 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerStarted","Data":"c04f20ef55eebdc55c30a232da0cfcb0cdd91fcedf4a6e116c017f90f59bc3ce"} Dec 05 17:16:06 crc kubenswrapper[4753]: I1205 17:16:06.663197 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerStarted","Data":"1ae5a359df248399e090348bf91be40d244abf4508e3f0faaad7f7821ebe10f8"} Dec 05 17:16:06 crc kubenswrapper[4753]: I1205 17:16:06.663210 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerStarted","Data":"9e169098eb227f0d7e1a2339694ce9b3fc9c3a909cbe6dd53e0bbc6e0f2b8432"} Dec 05 17:16:06 crc kubenswrapper[4753]: I1205 17:16:06.663220 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerStarted","Data":"bbee2455cea393eac50296a516877c0e0243f247c57ff2544efbe3a3608f63bc"} Dec 05 17:16:06 crc kubenswrapper[4753]: I1205 17:16:06.663231 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerStarted","Data":"9ec8988016343ee673c873fba0297c102d2a45a244bfdab6c1a73b8be81c4ab9"} Dec 05 17:16:06 crc kubenswrapper[4753]: I1205 17:16:06.663240 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerStarted","Data":"6dc2421db02aac1e2c3e0d21c9e12191b2d01b8e3ffee85a9fc6b607156afd38"} Dec 05 17:16:07 crc kubenswrapper[4753]: I1205 17:16:07.727512 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a" path="/var/lib/kubelet/pods/f00ad131-6b85-4d0d-8fa5-1dfbdc7e161a/volumes" Dec 05 17:16:09 crc kubenswrapper[4753]: I1205 17:16:09.687105 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerStarted","Data":"e9e3d30f110a4fdf66a5b4e79f1bae11c8a67d3f6f69020230d6c999b05f6c0e"} Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.691542 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7"] Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.693205 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.695656 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.695736 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.702279 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-qj6pz" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.761268 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8"] Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.763281 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.770713 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.770774 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-q5djm" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.772508 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w"] Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.774251 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.860443 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-tsxw8\" (UID: \"b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.860525 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-tsxw8\" (UID: \"b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.861900 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjdfd\" (UniqueName: \"kubernetes.io/projected/3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182-kube-api-access-vjdfd\") pod \"obo-prometheus-operator-668cf9dfbb-f4kz7\" (UID: \"3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.922272 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-x8ww2"] Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.923713 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.925870 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-mb8qb" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.926289 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.963032 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjdfd\" (UniqueName: \"kubernetes.io/projected/3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182-kube-api-access-vjdfd\") pod \"obo-prometheus-operator-668cf9dfbb-f4kz7\" (UID: \"3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.963105 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/96a23d8c-f2af-4e5d-afa5-4734f81f73ef-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-ds52w\" (UID: \"96a23d8c-f2af-4e5d-afa5-4734f81f73ef\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.963159 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-tsxw8\" (UID: \"b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.963194 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-tsxw8\" (UID: \"b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.963231 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/96a23d8c-f2af-4e5d-afa5-4734f81f73ef-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-ds52w\" (UID: \"96a23d8c-f2af-4e5d-afa5-4734f81f73ef\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.972273 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-tsxw8\" (UID: \"b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.975195 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-tsxw8\" (UID: 
\"b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:10 crc kubenswrapper[4753]: I1205 17:16:10.987564 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjdfd\" (UniqueName: \"kubernetes.io/projected/3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182-kube-api-access-vjdfd\") pod \"obo-prometheus-operator-668cf9dfbb-f4kz7\" (UID: \"3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182\") " pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.011370 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.065063 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5zqx6\" (UniqueName: \"kubernetes.io/projected/6b271246-3310-483c-a548-db788331725d-kube-api-access-5zqx6\") pod \"observability-operator-d8bb48f5d-x8ww2\" (UID: \"6b271246-3310-483c-a548-db788331725d\") " pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.065121 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/6b271246-3310-483c-a548-db788331725d-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-x8ww2\" (UID: \"6b271246-3310-483c-a548-db788331725d\") " pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.065186 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/96a23d8c-f2af-4e5d-afa5-4734f81f73ef-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-ds52w\" (UID: \"96a23d8c-f2af-4e5d-afa5-4734f81f73ef\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.065453 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/96a23d8c-f2af-4e5d-afa5-4734f81f73ef-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-ds52w\" (UID: \"96a23d8c-f2af-4e5d-afa5-4734f81f73ef\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.069635 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/96a23d8c-f2af-4e5d-afa5-4734f81f73ef-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-ds52w\" (UID: \"96a23d8c-f2af-4e5d-afa5-4734f81f73ef\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.073692 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/96a23d8c-f2af-4e5d-afa5-4734f81f73ef-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-57647d658-ds52w\" (UID: \"96a23d8c-f2af-4e5d-afa5-4734f81f73ef\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.074729 4753 log.go:32] 
"RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(613898bdf7342f0c32b7adf1c2b43f68bff9b7896e747050505caf149c7f7b0c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.074801 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(613898bdf7342f0c32b7adf1c2b43f68bff9b7896e747050505caf149c7f7b0c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.074827 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(613898bdf7342f0c32b7adf1c2b43f68bff9b7896e747050505caf149c7f7b0c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.074885 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators(3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators(3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(613898bdf7342f0c32b7adf1c2b43f68bff9b7896e747050505caf149c7f7b0c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" podUID="3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.085998 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.094598 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.123363 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5446b9c989-66rht"] Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.124524 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.131675 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-vk7hg" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.144349 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(9de620b595612c4c1cd17326eedd9786c71d3912b19457436fcd315a461236f1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.144448 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(9de620b595612c4c1cd17326eedd9786c71d3912b19457436fcd315a461236f1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.144509 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(9de620b595612c4c1cd17326eedd9786c71d3912b19457436fcd315a461236f1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.144583 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators(b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators(b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(9de620b595612c4c1cd17326eedd9786c71d3912b19457436fcd315a461236f1): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" podUID="b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.159531 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(8d4bfaff2316c00bf9a21d4a534a4b63b7b6d7625ac2f02785dd0efd61a7b046): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.159630 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(8d4bfaff2316c00bf9a21d4a534a4b63b7b6d7625ac2f02785dd0efd61a7b046): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.159655 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(8d4bfaff2316c00bf9a21d4a534a4b63b7b6d7625ac2f02785dd0efd61a7b046): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.159715 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators(96a23d8c-f2af-4e5d-afa5-4734f81f73ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators(96a23d8c-f2af-4e5d-afa5-4734f81f73ef)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(8d4bfaff2316c00bf9a21d4a534a4b63b7b6d7625ac2f02785dd0efd61a7b046): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" podUID="96a23d8c-f2af-4e5d-afa5-4734f81f73ef" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.166824 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5zqx6\" (UniqueName: \"kubernetes.io/projected/6b271246-3310-483c-a548-db788331725d-kube-api-access-5zqx6\") pod \"observability-operator-d8bb48f5d-x8ww2\" (UID: \"6b271246-3310-483c-a548-db788331725d\") " pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.166882 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/6b271246-3310-483c-a548-db788331725d-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-x8ww2\" (UID: \"6b271246-3310-483c-a548-db788331725d\") " pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.173910 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/6b271246-3310-483c-a548-db788331725d-observability-operator-tls\") pod \"observability-operator-d8bb48f5d-x8ww2\" (UID: \"6b271246-3310-483c-a548-db788331725d\") " pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.189694 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5zqx6\" (UniqueName: \"kubernetes.io/projected/6b271246-3310-483c-a548-db788331725d-kube-api-access-5zqx6\") pod \"observability-operator-d8bb48f5d-x8ww2\" (UID: \"6b271246-3310-483c-a548-db788331725d\") " pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.242762 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.268429 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w7qsk\" (UniqueName: \"kubernetes.io/projected/a1cc3eef-b65a-496f-9cf1-7567825fce78-kube-api-access-w7qsk\") pod \"perses-operator-5446b9c989-66rht\" (UID: \"a1cc3eef-b65a-496f-9cf1-7567825fce78\") " pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.268553 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/a1cc3eef-b65a-496f-9cf1-7567825fce78-openshift-service-ca\") pod \"perses-operator-5446b9c989-66rht\" (UID: \"a1cc3eef-b65a-496f-9cf1-7567825fce78\") " pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.291539 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(a832fb9f678b8b02ff60c2aff1880017c925c25b7e73fe5707b4f9664363967a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.291636 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(a832fb9f678b8b02ff60c2aff1880017c925c25b7e73fe5707b4f9664363967a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.291671 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(a832fb9f678b8b02ff60c2aff1880017c925c25b7e73fe5707b4f9664363967a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.291739 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-x8ww2_openshift-operators(6b271246-3310-483c-a548-db788331725d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-x8ww2_openshift-operators(6b271246-3310-483c-a548-db788331725d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(a832fb9f678b8b02ff60c2aff1880017c925c25b7e73fe5707b4f9664363967a): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" podUID="6b271246-3310-483c-a548-db788331725d" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.370234 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/a1cc3eef-b65a-496f-9cf1-7567825fce78-openshift-service-ca\") pod \"perses-operator-5446b9c989-66rht\" (UID: \"a1cc3eef-b65a-496f-9cf1-7567825fce78\") " pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.370736 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w7qsk\" (UniqueName: \"kubernetes.io/projected/a1cc3eef-b65a-496f-9cf1-7567825fce78-kube-api-access-w7qsk\") pod \"perses-operator-5446b9c989-66rht\" (UID: \"a1cc3eef-b65a-496f-9cf1-7567825fce78\") " pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.372226 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/a1cc3eef-b65a-496f-9cf1-7567825fce78-openshift-service-ca\") pod \"perses-operator-5446b9c989-66rht\" (UID: \"a1cc3eef-b65a-496f-9cf1-7567825fce78\") " pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.395947 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w7qsk\" (UniqueName: \"kubernetes.io/projected/a1cc3eef-b65a-496f-9cf1-7567825fce78-kube-api-access-w7qsk\") pod \"perses-operator-5446b9c989-66rht\" (UID: \"a1cc3eef-b65a-496f-9cf1-7567825fce78\") " pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.442128 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.464458 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(5ae3a61a97c135b602308308c2a6a7f78faabf916cf50d476850b3b51c15fe9f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.464644 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(5ae3a61a97c135b602308308c2a6a7f78faabf916cf50d476850b3b51c15fe9f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.464713 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(5ae3a61a97c135b602308308c2a6a7f78faabf916cf50d476850b3b51c15fe9f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:11 crc kubenswrapper[4753]: E1205 17:16:11.464823 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-66rht_openshift-operators(a1cc3eef-b65a-496f-9cf1-7567825fce78)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-66rht_openshift-operators(a1cc3eef-b65a-496f-9cf1-7567825fce78)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(5ae3a61a97c135b602308308c2a6a7f78faabf916cf50d476850b3b51c15fe9f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-66rht" podUID="a1cc3eef-b65a-496f-9cf1-7567825fce78" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.703592 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" event={"ID":"23d2265d-6549-4367-ac82-1867a8497b40","Type":"ContainerStarted","Data":"f35f57d48e8b235dedf6e61f8bf79582fc3c92522814cd1742a6e0e9f9c99b61"} Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.704039 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.752789 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" podStartSLOduration=7.752763159 podStartE2EDuration="7.752763159s" podCreationTimestamp="2025-12-05 17:16:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:16:11.743276545 +0000 UTC m=+710.246383551" watchObservedRunningTime="2025-12-05 17:16:11.752763159 +0000 UTC m=+710.255870165" Dec 05 17:16:11 crc kubenswrapper[4753]: I1205 17:16:11.768704 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.324039 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w"] Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.324173 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.324620 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.335457 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8"] Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.335656 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.336325 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.343026 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7"] Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.343291 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.344244 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.349587 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-x8ww2"] Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.349760 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.350449 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.377281 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-66rht"] Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.377497 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.378194 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.440562 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(069c112a47b3a4f9600af5a2d4d7ea5c94065b13a7a5a3eced74419f170c90a2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.440684 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(069c112a47b3a4f9600af5a2d4d7ea5c94065b13a7a5a3eced74419f170c90a2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.440727 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(069c112a47b3a4f9600af5a2d4d7ea5c94065b13a7a5a3eced74419f170c90a2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.440797 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators(96a23d8c-f2af-4e5d-afa5-4734f81f73ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators(96a23d8c-f2af-4e5d-afa5-4734f81f73ef)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(069c112a47b3a4f9600af5a2d4d7ea5c94065b13a7a5a3eced74419f170c90a2): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" podUID="96a23d8c-f2af-4e5d-afa5-4734f81f73ef" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.440876 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(4b2d12f1776f0e950537daa2cc4f386aa56761c659d96c0063a9e38435ed8ada): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.440906 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(4b2d12f1776f0e950537daa2cc4f386aa56761c659d96c0063a9e38435ed8ada): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.440927 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(4b2d12f1776f0e950537daa2cc4f386aa56761c659d96c0063a9e38435ed8ada): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.440964 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators(b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators(b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(4b2d12f1776f0e950537daa2cc4f386aa56761c659d96c0063a9e38435ed8ada): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" podUID="b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.467888 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(4ab440618eba18c605fc4d9b5438bb3390a45585c96064513fe966cb12575c17): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.467979 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(4ab440618eba18c605fc4d9b5438bb3390a45585c96064513fe966cb12575c17): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.468010 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(4ab440618eba18c605fc4d9b5438bb3390a45585c96064513fe966cb12575c17): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.468063 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators(3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators(3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(4ab440618eba18c605fc4d9b5438bb3390a45585c96064513fe966cb12575c17): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" podUID="3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.482209 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(dab08f4d3d1463723149d535faf33b698487ec3851e340e99af7a86b6b6970e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.482267 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(dab08f4d3d1463723149d535faf33b698487ec3851e340e99af7a86b6b6970e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.482296 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(dab08f4d3d1463723149d535faf33b698487ec3851e340e99af7a86b6b6970e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.482340 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-x8ww2_openshift-operators(6b271246-3310-483c-a548-db788331725d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-x8ww2_openshift-operators(6b271246-3310-483c-a548-db788331725d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(dab08f4d3d1463723149d535faf33b698487ec3851e340e99af7a86b6b6970e0): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" podUID="6b271246-3310-483c-a548-db788331725d" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.486420 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(fc8d98f24f4ba9fe030ede819140e4148851ea91a46eabf08330595ad859646f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.486536 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(fc8d98f24f4ba9fe030ede819140e4148851ea91a46eabf08330595ad859646f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.486579 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(fc8d98f24f4ba9fe030ede819140e4148851ea91a46eabf08330595ad859646f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:12 crc kubenswrapper[4753]: E1205 17:16:12.486651 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-66rht_openshift-operators(a1cc3eef-b65a-496f-9cf1-7567825fce78)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-66rht_openshift-operators(a1cc3eef-b65a-496f-9cf1-7567825fce78)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(fc8d98f24f4ba9fe030ede819140e4148851ea91a46eabf08330595ad859646f): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-66rht" podUID="a1cc3eef-b65a-496f-9cf1-7567825fce78" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.710021 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.710510 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:12 crc kubenswrapper[4753]: I1205 17:16:12.738328 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:19 crc kubenswrapper[4753]: I1205 17:16:19.720601 4753 scope.go:117] "RemoveContainer" containerID="eedfb2d21b98fca57c1ceac0b8e177906400b69a7b79c834b4a5059d01f98efe" Dec 05 17:16:19 crc kubenswrapper[4753]: E1205 17:16:19.721347 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-hpl8r_openshift-multus(6b3d3501-4f16-4375-adf2-fd54b1cd13cf)\"" pod="openshift-multus/multus-hpl8r" podUID="6b3d3501-4f16-4375-adf2-fd54b1cd13cf" Dec 05 17:16:23 crc kubenswrapper[4753]: I1205 17:16:23.720031 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:23 crc kubenswrapper[4753]: I1205 17:16:23.721572 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:23 crc kubenswrapper[4753]: E1205 17:16:23.755695 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(aa9f430f77ba27c813cf7197edaa1ba5a30a79d73de346cb0819f365001082c9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:23 crc kubenswrapper[4753]: E1205 17:16:23.755822 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(aa9f430f77ba27c813cf7197edaa1ba5a30a79d73de346cb0819f365001082c9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:23 crc kubenswrapper[4753]: E1205 17:16:23.755869 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(aa9f430f77ba27c813cf7197edaa1ba5a30a79d73de346cb0819f365001082c9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:23 crc kubenswrapper[4753]: E1205 17:16:23.755950 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"observability-operator-d8bb48f5d-x8ww2_openshift-operators(6b271246-3310-483c-a548-db788331725d)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"observability-operator-d8bb48f5d-x8ww2_openshift-operators(6b271246-3310-483c-a548-db788331725d)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_observability-operator-d8bb48f5d-x8ww2_openshift-operators_6b271246-3310-483c-a548-db788331725d_0(aa9f430f77ba27c813cf7197edaa1ba5a30a79d73de346cb0819f365001082c9): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" podUID="6b271246-3310-483c-a548-db788331725d" Dec 05 17:16:25 crc kubenswrapper[4753]: I1205 17:16:25.720564 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:25 crc kubenswrapper[4753]: I1205 17:16:25.721259 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:25 crc kubenswrapper[4753]: E1205 17:16:25.749019 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(2695dcf060f52af1f51bb56720905df32b7497885c4de63851c17172f788684b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:25 crc kubenswrapper[4753]: E1205 17:16:25.749117 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(2695dcf060f52af1f51bb56720905df32b7497885c4de63851c17172f788684b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:25 crc kubenswrapper[4753]: E1205 17:16:25.749143 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(2695dcf060f52af1f51bb56720905df32b7497885c4de63851c17172f788684b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:25 crc kubenswrapper[4753]: E1205 17:16:25.749358 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators(b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators(b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_openshift-operators_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf_0(2695dcf060f52af1f51bb56720905df32b7497885c4de63851c17172f788684b): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" podUID="b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf" Dec 05 17:16:26 crc kubenswrapper[4753]: I1205 17:16:26.719657 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:26 crc kubenswrapper[4753]: I1205 17:16:26.720713 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:26 crc kubenswrapper[4753]: E1205 17:16:26.745623 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(a2451461ccedd2f3f707d7ec8d5ad68b37d5cb5c066a4c8f54e104079a2c541c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:26 crc kubenswrapper[4753]: E1205 17:16:26.745794 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(a2451461ccedd2f3f707d7ec8d5ad68b37d5cb5c066a4c8f54e104079a2c541c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:26 crc kubenswrapper[4753]: E1205 17:16:26.745823 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(a2451461ccedd2f3f707d7ec8d5ad68b37d5cb5c066a4c8f54e104079a2c541c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:26 crc kubenswrapper[4753]: E1205 17:16:26.745891 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators(3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators(3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-668cf9dfbb-f4kz7_openshift-operators_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182_0(a2451461ccedd2f3f707d7ec8d5ad68b37d5cb5c066a4c8f54e104079a2c541c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" podUID="3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182" Dec 05 17:16:27 crc kubenswrapper[4753]: I1205 17:16:27.722463 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:27 crc kubenswrapper[4753]: I1205 17:16:27.722753 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:27 crc kubenswrapper[4753]: I1205 17:16:27.723451 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:27 crc kubenswrapper[4753]: I1205 17:16:27.723864 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:27 crc kubenswrapper[4753]: E1205 17:16:27.755682 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(9c5094edabe0585e63db1aafb540004745d46e887d49640c9e4f2e6158987194): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:27 crc kubenswrapper[4753]: E1205 17:16:27.755776 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(9c5094edabe0585e63db1aafb540004745d46e887d49640c9e4f2e6158987194): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:27 crc kubenswrapper[4753]: E1205 17:16:27.755844 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(9c5094edabe0585e63db1aafb540004745d46e887d49640c9e4f2e6158987194): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:27 crc kubenswrapper[4753]: E1205 17:16:27.755904 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"perses-operator-5446b9c989-66rht_openshift-operators(a1cc3eef-b65a-496f-9cf1-7567825fce78)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"perses-operator-5446b9c989-66rht_openshift-operators(a1cc3eef-b65a-496f-9cf1-7567825fce78)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_perses-operator-5446b9c989-66rht_openshift-operators_a1cc3eef-b65a-496f-9cf1-7567825fce78_0(9c5094edabe0585e63db1aafb540004745d46e887d49640c9e4f2e6158987194): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-operators/perses-operator-5446b9c989-66rht" podUID="a1cc3eef-b65a-496f-9cf1-7567825fce78" Dec 05 17:16:27 crc kubenswrapper[4753]: E1205 17:16:27.768201 4753 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(ce2577d358403e7684f18ef02a219faf9701888b6d9672612bf998a5443a901c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:16:27 crc kubenswrapper[4753]: E1205 17:16:27.768300 4753 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(ce2577d358403e7684f18ef02a219faf9701888b6d9672612bf998a5443a901c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:27 crc kubenswrapper[4753]: E1205 17:16:27.768332 4753 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(ce2577d358403e7684f18ef02a219faf9701888b6d9672612bf998a5443a901c): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:27 crc kubenswrapper[4753]: E1205 17:16:27.768396 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators(96a23d8c-f2af-4e5d-afa5-4734f81f73ef)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators(96a23d8c-f2af-4e5d-afa5-4734f81f73ef)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_obo-prometheus-operator-admission-webhook-57647d658-ds52w_openshift-operators_96a23d8c-f2af-4e5d-afa5-4734f81f73ef_0(ce2577d358403e7684f18ef02a219faf9701888b6d9672612bf998a5443a901c): no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\"" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" podUID="96a23d8c-f2af-4e5d-afa5-4734f81f73ef" Dec 05 17:16:34 crc kubenswrapper[4753]: I1205 17:16:34.721377 4753 scope.go:117] "RemoveContainer" containerID="eedfb2d21b98fca57c1ceac0b8e177906400b69a7b79c834b4a5059d01f98efe" Dec 05 17:16:35 crc kubenswrapper[4753]: I1205 17:16:35.178061 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-68f4b" Dec 05 17:16:35 crc kubenswrapper[4753]: I1205 17:16:35.872081 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-hpl8r_6b3d3501-4f16-4375-adf2-fd54b1cd13cf/kube-multus/2.log" Dec 05 17:16:35 crc kubenswrapper[4753]: I1205 17:16:35.872670 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-hpl8r" event={"ID":"6b3d3501-4f16-4375-adf2-fd54b1cd13cf","Type":"ContainerStarted","Data":"1aa08e983cd90e31912c27abb8bf158d07715cf8bd89720a27368f98f9f590e8"} Dec 05 17:16:36 crc kubenswrapper[4753]: I1205 17:16:36.719571 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:36 crc kubenswrapper[4753]: I1205 17:16:36.720437 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" Dec 05 17:16:37 crc kubenswrapper[4753]: I1205 17:16:37.005082 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8"] Dec 05 17:16:37 crc kubenswrapper[4753]: W1205 17:16:37.011312 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1a19a2b_30b2_47bd_a4e3_cb23e37e16cf.slice/crio-bafd7edec5e64afc0ef0225606615e9a5e0a255795094dd7b21ab89f1044c56d WatchSource:0}: Error finding container bafd7edec5e64afc0ef0225606615e9a5e0a255795094dd7b21ab89f1044c56d: Status 404 returned error can't find the container with id bafd7edec5e64afc0ef0225606615e9a5e0a255795094dd7b21ab89f1044c56d Dec 05 17:16:37 crc kubenswrapper[4753]: I1205 17:16:37.720434 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:37 crc kubenswrapper[4753]: I1205 17:16:37.720479 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:37 crc kubenswrapper[4753]: I1205 17:16:37.721088 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:37 crc kubenswrapper[4753]: I1205 17:16:37.721087 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" Dec 05 17:16:37 crc kubenswrapper[4753]: I1205 17:16:37.886135 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" event={"ID":"b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf","Type":"ContainerStarted","Data":"bafd7edec5e64afc0ef0225606615e9a5e0a255795094dd7b21ab89f1044c56d"} Dec 05 17:16:37 crc kubenswrapper[4753]: I1205 17:16:37.971567 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7"] Dec 05 17:16:37 crc kubenswrapper[4753]: W1205 17:16:37.978339 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a0c0fe6_2fda_4398_8f9b_4ba0b6b52182.slice/crio-365176c5f67186209fb54de394818d74767c06d7feafcfa867abe7a22ba82886 WatchSource:0}: Error finding container 365176c5f67186209fb54de394818d74767c06d7feafcfa867abe7a22ba82886: Status 404 returned error can't find the container with id 365176c5f67186209fb54de394818d74767c06d7feafcfa867abe7a22ba82886 Dec 05 17:16:38 crc kubenswrapper[4753]: I1205 17:16:38.217125 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-d8bb48f5d-x8ww2"] Dec 05 17:16:38 crc kubenswrapper[4753]: I1205 17:16:38.895733 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" event={"ID":"3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182","Type":"ContainerStarted","Data":"365176c5f67186209fb54de394818d74767c06d7feafcfa867abe7a22ba82886"} Dec 05 17:16:38 crc kubenswrapper[4753]: I1205 17:16:38.897010 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" event={"ID":"6b271246-3310-483c-a548-db788331725d","Type":"ContainerStarted","Data":"f3582ee98a3b68cf0b8c84cd36b2d34f16e20b91e2476da784e6ddfc804c30f4"} Dec 05 17:16:40 crc kubenswrapper[4753]: I1205 17:16:40.720506 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:40 crc kubenswrapper[4753]: I1205 17:16:40.720529 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:40 crc kubenswrapper[4753]: I1205 17:16:40.721291 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" Dec 05 17:16:40 crc kubenswrapper[4753]: I1205 17:16:40.721301 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:50 crc kubenswrapper[4753]: I1205 17:16:50.052506 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w"] Dec 05 17:16:50 crc kubenswrapper[4753]: W1205 17:16:50.073622 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96a23d8c_f2af_4e5d_afa5_4734f81f73ef.slice/crio-767d290a85efa60e11fedeb45714810bf30ed86e763e8a4c0db381d452992951 WatchSource:0}: Error finding container 767d290a85efa60e11fedeb45714810bf30ed86e763e8a4c0db381d452992951: Status 404 returned error can't find the container with id 767d290a85efa60e11fedeb45714810bf30ed86e763e8a4c0db381d452992951 Dec 05 17:16:50 crc kubenswrapper[4753]: I1205 17:16:50.097875 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5446b9c989-66rht"] Dec 05 17:16:50 crc kubenswrapper[4753]: W1205 17:16:50.175604 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1cc3eef_b65a_496f_9cf1_7567825fce78.slice/crio-5dd382d046434553bb3aa14a906eba3515f6cfa6f4842fdc8f3e18dc58f4d1f3 WatchSource:0}: Error finding container 5dd382d046434553bb3aa14a906eba3515f6cfa6f4842fdc8f3e18dc58f4d1f3: Status 404 returned error can't find the container with id 5dd382d046434553bb3aa14a906eba3515f6cfa6f4842fdc8f3e18dc58f4d1f3 Dec 05 17:16:50 crc kubenswrapper[4753]: I1205 17:16:50.992878 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-66rht" event={"ID":"a1cc3eef-b65a-496f-9cf1-7567825fce78","Type":"ContainerStarted","Data":"5dd382d046434553bb3aa14a906eba3515f6cfa6f4842fdc8f3e18dc58f4d1f3"} Dec 05 17:16:50 crc kubenswrapper[4753]: I1205 17:16:50.994997 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" event={"ID":"96a23d8c-f2af-4e5d-afa5-4734f81f73ef","Type":"ContainerStarted","Data":"9385dcdf71c7a7229ef4676a5e998c23af56eb45640edf77a150367a4ce01699"} Dec 05 17:16:50 crc kubenswrapper[4753]: I1205 17:16:50.995066 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" event={"ID":"96a23d8c-f2af-4e5d-afa5-4734f81f73ef","Type":"ContainerStarted","Data":"767d290a85efa60e11fedeb45714810bf30ed86e763e8a4c0db381d452992951"} Dec 05 17:16:50 crc kubenswrapper[4753]: I1205 17:16:50.996983 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" event={"ID":"6b271246-3310-483c-a548-db788331725d","Type":"ContainerStarted","Data":"c827b84736aad7fbfce1688099f8e2ab3afdd59357ba9e45abfabcc9e8b19d6e"} Dec 05 17:16:50 crc kubenswrapper[4753]: I1205 17:16:50.997202 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:51 crc kubenswrapper[4753]: I1205 17:16:51.001381 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" event={"ID":"3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182","Type":"ContainerStarted","Data":"8e93c0e437cdc6cc715406ea52528ae934fdd89f9eb701e7f8af7af7e40526a1"} Dec 05 17:16:51 crc kubenswrapper[4753]: I1205 17:16:51.003307 4753 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" event={"ID":"b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf","Type":"ContainerStarted","Data":"f554d3cc0a80bf00c420dd4d7bc887bd5b1a53943ca3f9308dffb250c5908d84"} Dec 05 17:16:51 crc kubenswrapper[4753]: I1205 17:16:51.022796 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-ds52w" podStartSLOduration=41.022761837 podStartE2EDuration="41.022761837s" podCreationTimestamp="2025-12-05 17:16:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:16:51.0214507 +0000 UTC m=+749.524557726" watchObservedRunningTime="2025-12-05 17:16:51.022761837 +0000 UTC m=+749.525868843" Dec 05 17:16:51 crc kubenswrapper[4753]: I1205 17:16:51.055069 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-668cf9dfbb-f4kz7" podStartSLOduration=29.107956797 podStartE2EDuration="41.055028897s" podCreationTimestamp="2025-12-05 17:16:10 +0000 UTC" firstStartedPulling="2025-12-05 17:16:37.981605124 +0000 UTC m=+736.484712140" lastFinishedPulling="2025-12-05 17:16:49.928677234 +0000 UTC m=+748.431784240" observedRunningTime="2025-12-05 17:16:51.041086555 +0000 UTC m=+749.544193581" watchObservedRunningTime="2025-12-05 17:16:51.055028897 +0000 UTC m=+749.558135913" Dec 05 17:16:51 crc kubenswrapper[4753]: I1205 17:16:51.068309 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" Dec 05 17:16:51 crc kubenswrapper[4753]: I1205 17:16:51.069972 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-57647d658-tsxw8" podStartSLOduration=28.201267831 podStartE2EDuration="41.069954567s" podCreationTimestamp="2025-12-05 17:16:10 +0000 UTC" firstStartedPulling="2025-12-05 17:16:37.024016051 +0000 UTC m=+735.527123067" lastFinishedPulling="2025-12-05 17:16:49.892702787 +0000 UTC m=+748.395809803" observedRunningTime="2025-12-05 17:16:51.069312628 +0000 UTC m=+749.572419634" watchObservedRunningTime="2025-12-05 17:16:51.069954567 +0000 UTC m=+749.573061573" Dec 05 17:16:51 crc kubenswrapper[4753]: I1205 17:16:51.107400 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-d8bb48f5d-x8ww2" podStartSLOduration=29.443964966 podStartE2EDuration="41.107361614s" podCreationTimestamp="2025-12-05 17:16:10 +0000 UTC" firstStartedPulling="2025-12-05 17:16:38.229206766 +0000 UTC m=+736.732313812" lastFinishedPulling="2025-12-05 17:16:49.892603454 +0000 UTC m=+748.395710460" observedRunningTime="2025-12-05 17:16:51.100754174 +0000 UTC m=+749.603861170" watchObservedRunningTime="2025-12-05 17:16:51.107361614 +0000 UTC m=+749.610468640" Dec 05 17:16:54 crc kubenswrapper[4753]: I1205 17:16:54.170582 4753 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 17:16:56 crc kubenswrapper[4753]: I1205 17:16:56.036473 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5446b9c989-66rht" event={"ID":"a1cc3eef-b65a-496f-9cf1-7567825fce78","Type":"ContainerStarted","Data":"55909e1b170198567d850d125d627c944eb26814fbdfc722eb7edfad22e4c41c"} Dec 05 17:16:56 crc 
kubenswrapper[4753]: I1205 17:16:56.037011 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:16:56 crc kubenswrapper[4753]: I1205 17:16:56.059107 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5446b9c989-66rht" podStartSLOduration=39.796595463 podStartE2EDuration="45.059084506s" podCreationTimestamp="2025-12-05 17:16:11 +0000 UTC" firstStartedPulling="2025-12-05 17:16:50.177562452 +0000 UTC m=+748.680669458" lastFinishedPulling="2025-12-05 17:16:55.440051495 +0000 UTC m=+753.943158501" observedRunningTime="2025-12-05 17:16:56.054746351 +0000 UTC m=+754.557853347" watchObservedRunningTime="2025-12-05 17:16:56.059084506 +0000 UTC m=+754.562191512" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.445560 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5446b9c989-66rht" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.486401 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-nqxn4"] Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.487541 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-nqxn4" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.490905 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-mt28j"] Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.491783 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-mt28j" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.496257 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.496342 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.497746 4753 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-9snqw" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.503473 4753 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-msmj2" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.503908 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-nqxn4"] Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.514370 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-mt28j"] Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.534128 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-scp9f"] Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.535338 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.538502 4753 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-k6knp" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.549257 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-scp9f"] Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.595233 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdv5f\" (UniqueName: \"kubernetes.io/projected/07264007-14c8-49d7-b4f7-ee34bad54bca-kube-api-access-zdv5f\") pod \"cert-manager-cainjector-7f985d654d-nqxn4\" (UID: \"07264007-14c8-49d7-b4f7-ee34bad54bca\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-nqxn4" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.595355 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mjjh\" (UniqueName: \"kubernetes.io/projected/62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6-kube-api-access-7mjjh\") pod \"cert-manager-webhook-5655c58dd6-scp9f\" (UID: \"62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.595400 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krdwb\" (UniqueName: \"kubernetes.io/projected/2406b66f-c017-41be-b22e-0a1b748b2fff-kube-api-access-krdwb\") pod \"cert-manager-5b446d88c5-mt28j\" (UID: \"2406b66f-c017-41be-b22e-0a1b748b2fff\") " pod="cert-manager/cert-manager-5b446d88c5-mt28j" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.696614 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mjjh\" (UniqueName: \"kubernetes.io/projected/62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6-kube-api-access-7mjjh\") pod \"cert-manager-webhook-5655c58dd6-scp9f\" (UID: \"62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.696719 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krdwb\" (UniqueName: \"kubernetes.io/projected/2406b66f-c017-41be-b22e-0a1b748b2fff-kube-api-access-krdwb\") pod \"cert-manager-5b446d88c5-mt28j\" (UID: \"2406b66f-c017-41be-b22e-0a1b748b2fff\") " pod="cert-manager/cert-manager-5b446d88c5-mt28j" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.696774 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdv5f\" (UniqueName: \"kubernetes.io/projected/07264007-14c8-49d7-b4f7-ee34bad54bca-kube-api-access-zdv5f\") pod \"cert-manager-cainjector-7f985d654d-nqxn4\" (UID: \"07264007-14c8-49d7-b4f7-ee34bad54bca\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-nqxn4" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.725313 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdv5f\" (UniqueName: \"kubernetes.io/projected/07264007-14c8-49d7-b4f7-ee34bad54bca-kube-api-access-zdv5f\") pod \"cert-manager-cainjector-7f985d654d-nqxn4\" (UID: \"07264007-14c8-49d7-b4f7-ee34bad54bca\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-nqxn4" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.731566 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-7mjjh\" (UniqueName: \"kubernetes.io/projected/62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6-kube-api-access-7mjjh\") pod \"cert-manager-webhook-5655c58dd6-scp9f\" (UID: \"62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.731949 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krdwb\" (UniqueName: \"kubernetes.io/projected/2406b66f-c017-41be-b22e-0a1b748b2fff-kube-api-access-krdwb\") pod \"cert-manager-5b446d88c5-mt28j\" (UID: \"2406b66f-c017-41be-b22e-0a1b748b2fff\") " pod="cert-manager/cert-manager-5b446d88c5-mt28j" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.812523 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-nqxn4" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.826577 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-mt28j" Dec 05 17:17:01 crc kubenswrapper[4753]: I1205 17:17:01.857822 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" Dec 05 17:17:02 crc kubenswrapper[4753]: I1205 17:17:02.149530 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-mt28j"] Dec 05 17:17:02 crc kubenswrapper[4753]: W1205 17:17:02.158745 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2406b66f_c017_41be_b22e_0a1b748b2fff.slice/crio-558d861b848e80fb7891ec3513ef503b57f782e40cac6772f6365e0a3cf79480 WatchSource:0}: Error finding container 558d861b848e80fb7891ec3513ef503b57f782e40cac6772f6365e0a3cf79480: Status 404 returned error can't find the container with id 558d861b848e80fb7891ec3513ef503b57f782e40cac6772f6365e0a3cf79480 Dec 05 17:17:02 crc kubenswrapper[4753]: I1205 17:17:02.212388 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-scp9f"] Dec 05 17:17:02 crc kubenswrapper[4753]: I1205 17:17:02.239872 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-nqxn4"] Dec 05 17:17:03 crc kubenswrapper[4753]: I1205 17:17:03.091922 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" event={"ID":"62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6","Type":"ContainerStarted","Data":"d2174adb49f385d7d4a763350179141072c7c7b9530ccadf42d05f5934d8877e"} Dec 05 17:17:03 crc kubenswrapper[4753]: I1205 17:17:03.095263 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-nqxn4" event={"ID":"07264007-14c8-49d7-b4f7-ee34bad54bca","Type":"ContainerStarted","Data":"63d77e460a7f0f8ce8b15654099c656119215630c74f689e222a8526a43c5279"} Dec 05 17:17:03 crc kubenswrapper[4753]: I1205 17:17:03.097793 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-mt28j" event={"ID":"2406b66f-c017-41be-b22e-0a1b748b2fff","Type":"ContainerStarted","Data":"558d861b848e80fb7891ec3513ef503b57f782e40cac6772f6365e0a3cf79480"} Dec 05 17:17:08 crc kubenswrapper[4753]: I1205 17:17:08.128951 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-nqxn4" 
event={"ID":"07264007-14c8-49d7-b4f7-ee34bad54bca","Type":"ContainerStarted","Data":"bf4ed92baec271fa2455849fbe24a63734587f342a5944ce0d9c5093708edc72"} Dec 05 17:17:08 crc kubenswrapper[4753]: I1205 17:17:08.131399 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-mt28j" event={"ID":"2406b66f-c017-41be-b22e-0a1b748b2fff","Type":"ContainerStarted","Data":"69b2b8321af20091066278c5b7741ee237accea0917d7552bbbb701d552f2d64"} Dec 05 17:17:08 crc kubenswrapper[4753]: I1205 17:17:08.133294 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" event={"ID":"62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6","Type":"ContainerStarted","Data":"a0c05606e60c13707d6cc15a90fae7fea835a8a6190d6872ad3f67ccc4cc768b"} Dec 05 17:17:08 crc kubenswrapper[4753]: I1205 17:17:08.133467 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" Dec 05 17:17:08 crc kubenswrapper[4753]: I1205 17:17:08.155123 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-nqxn4" podStartSLOduration=1.9218597229999999 podStartE2EDuration="7.155092374s" podCreationTimestamp="2025-12-05 17:17:01 +0000 UTC" firstStartedPulling="2025-12-05 17:17:02.248108677 +0000 UTC m=+760.751215683" lastFinishedPulling="2025-12-05 17:17:07.481341308 +0000 UTC m=+765.984448334" observedRunningTime="2025-12-05 17:17:08.148947637 +0000 UTC m=+766.652054663" watchObservedRunningTime="2025-12-05 17:17:08.155092374 +0000 UTC m=+766.658199380" Dec 05 17:17:08 crc kubenswrapper[4753]: I1205 17:17:08.168122 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" podStartSLOduration=1.910433234 podStartE2EDuration="7.168075788s" podCreationTimestamp="2025-12-05 17:17:01 +0000 UTC" firstStartedPulling="2025-12-05 17:17:02.231462637 +0000 UTC m=+760.734569643" lastFinishedPulling="2025-12-05 17:17:07.489105191 +0000 UTC m=+765.992212197" observedRunningTime="2025-12-05 17:17:08.165836853 +0000 UTC m=+766.668943859" watchObservedRunningTime="2025-12-05 17:17:08.168075788 +0000 UTC m=+766.671182794" Dec 05 17:17:08 crc kubenswrapper[4753]: I1205 17:17:08.184325 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-mt28j" podStartSLOduration=1.81346279 podStartE2EDuration="7.184297635s" podCreationTimestamp="2025-12-05 17:17:01 +0000 UTC" firstStartedPulling="2025-12-05 17:17:02.163102038 +0000 UTC m=+760.666209044" lastFinishedPulling="2025-12-05 17:17:07.533936883 +0000 UTC m=+766.037043889" observedRunningTime="2025-12-05 17:17:08.179425665 +0000 UTC m=+766.682532671" watchObservedRunningTime="2025-12-05 17:17:08.184297635 +0000 UTC m=+766.687404651" Dec 05 17:17:16 crc kubenswrapper[4753]: I1205 17:17:16.863535 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-scp9f" Dec 05 17:17:28 crc kubenswrapper[4753]: I1205 17:17:28.979763 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:17:28 crc kubenswrapper[4753]: I1205 17:17:28.980658 4753 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:17:47 crc kubenswrapper[4753]: I1205 17:17:47.716680 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2"] Dec 05 17:17:47 crc kubenswrapper[4753]: I1205 17:17:47.718605 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:47 crc kubenswrapper[4753]: I1205 17:17:47.724458 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 17:17:47 crc kubenswrapper[4753]: I1205 17:17:47.737546 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2"] Dec 05 17:17:47 crc kubenswrapper[4753]: I1205 17:17:47.903217 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:47 crc kubenswrapper[4753]: I1205 17:17:47.903376 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:47 crc kubenswrapper[4753]: I1205 17:17:47.903438 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lxdbx\" (UniqueName: \"kubernetes.io/projected/940d564a-350c-4480-beae-dda46c53287a-kube-api-access-lxdbx\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:48 crc kubenswrapper[4753]: I1205 17:17:48.005473 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:48 crc kubenswrapper[4753]: I1205 17:17:48.005565 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lxdbx\" (UniqueName: \"kubernetes.io/projected/940d564a-350c-4480-beae-dda46c53287a-kube-api-access-lxdbx\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:48 crc kubenswrapper[4753]: 
I1205 17:17:48.005599 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:48 crc kubenswrapper[4753]: I1205 17:17:48.006232 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-util\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:48 crc kubenswrapper[4753]: I1205 17:17:48.006253 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-bundle\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:48 crc kubenswrapper[4753]: I1205 17:17:48.042505 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lxdbx\" (UniqueName: \"kubernetes.io/projected/940d564a-350c-4480-beae-dda46c53287a-kube-api-access-lxdbx\") pod \"7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") " pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:48 crc kubenswrapper[4753]: I1205 17:17:48.337798 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" Dec 05 17:17:48 crc kubenswrapper[4753]: I1205 17:17:48.643800 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2"] Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.339352 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h7nrn"] Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.342937 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.355963 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h7nrn"] Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.427139 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-utilities\") pod \"redhat-operators-h7nrn\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") " pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.427248 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lwhxb\" (UniqueName: \"kubernetes.io/projected/9f941c89-6e05-43a8-8667-ac7f1c799b83-kube-api-access-lwhxb\") pod \"redhat-operators-h7nrn\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") " pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.427351 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-catalog-content\") pod \"redhat-operators-h7nrn\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") " pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.503718 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" event={"ID":"940d564a-350c-4480-beae-dda46c53287a","Type":"ContainerStarted","Data":"1e1e075250252361e99fb20701247b1c8f710addc5f22855a15bb3210e3316b1"} Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.529251 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-catalog-content\") pod \"redhat-operators-h7nrn\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") " pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.529331 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-utilities\") pod \"redhat-operators-h7nrn\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") " pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.529377 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lwhxb\" (UniqueName: \"kubernetes.io/projected/9f941c89-6e05-43a8-8667-ac7f1c799b83-kube-api-access-lwhxb\") pod \"redhat-operators-h7nrn\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") " pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.530058 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-catalog-content\") pod \"redhat-operators-h7nrn\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") " pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.530285 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-utilities\") pod \"redhat-operators-h7nrn\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") " pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.555092 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lwhxb\" (UniqueName: \"kubernetes.io/projected/9f941c89-6e05-43a8-8667-ac7f1c799b83-kube-api-access-lwhxb\") pod \"redhat-operators-h7nrn\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") " pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:49 crc kubenswrapper[4753]: I1205 17:17:49.666485 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h7nrn" Dec 05 17:17:50 crc kubenswrapper[4753]: I1205 17:17:50.143933 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h7nrn"] Dec 05 17:17:50 crc kubenswrapper[4753]: W1205 17:17:50.149354 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f941c89_6e05_43a8_8667_ac7f1c799b83.slice/crio-bb4392296d0a81ad735c01fdbe3c1ba6814c6fec14be54c5d9a014ff2ab127c7 WatchSource:0}: Error finding container bb4392296d0a81ad735c01fdbe3c1ba6814c6fec14be54c5d9a014ff2ab127c7: Status 404 returned error can't find the container with id bb4392296d0a81ad735c01fdbe3c1ba6814c6fec14be54c5d9a014ff2ab127c7 Dec 05 17:17:50 crc kubenswrapper[4753]: I1205 17:17:50.512906 4753 generic.go:334] "Generic (PLEG): container finished" podID="940d564a-350c-4480-beae-dda46c53287a" containerID="bb13c9fda3a9d693ffa8175ec1eb90425c15e91a6a964b192b86b16e3d6c5361" exitCode=0 Dec 05 17:17:50 crc kubenswrapper[4753]: I1205 17:17:50.513020 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" event={"ID":"940d564a-350c-4480-beae-dda46c53287a","Type":"ContainerDied","Data":"bb13c9fda3a9d693ffa8175ec1eb90425c15e91a6a964b192b86b16e3d6c5361"} Dec 05 17:17:50 crc kubenswrapper[4753]: I1205 17:17:50.519120 4753 generic.go:334] "Generic (PLEG): container finished" podID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerID="4e10b91765340c3f095e2f1824b44f1d76068b6210cc9fd15b5a2c54484bae79" exitCode=0 Dec 05 17:17:50 crc kubenswrapper[4753]: I1205 17:17:50.519190 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7nrn" event={"ID":"9f941c89-6e05-43a8-8667-ac7f1c799b83","Type":"ContainerDied","Data":"4e10b91765340c3f095e2f1824b44f1d76068b6210cc9fd15b5a2c54484bae79"} Dec 05 17:17:50 crc kubenswrapper[4753]: I1205 17:17:50.519222 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7nrn" event={"ID":"9f941c89-6e05-43a8-8667-ac7f1c799b83","Type":"ContainerStarted","Data":"bb4392296d0a81ad735c01fdbe3c1ba6814c6fec14be54c5d9a014ff2ab127c7"} Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.397698 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["minio-dev/minio"] Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.399857 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="minio-dev/minio" Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.402661 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"openshift-service-ca.crt" Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.404821 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"minio-dev"/"kube-root-ca.crt" Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.419592 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"] Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.526962 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7nrn" event={"ID":"9f941c89-6e05-43a8-8667-ac7f1c799b83","Type":"ContainerStarted","Data":"c3503eb1183f79cd342a7c148de2bef742bca9af28502336e606db91e2cd152c"} Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.566919 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b2fc8b84-7edc-4ee6-bf46-d37e2718ba5e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b2fc8b84-7edc-4ee6-bf46-d37e2718ba5e\") pod \"minio\" (UID: \"90c36189-2580-4339-924e-c17f5ebb5d91\") " pod="minio-dev/minio" Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.567075 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57x6z\" (UniqueName: \"kubernetes.io/projected/90c36189-2580-4339-924e-c17f5ebb5d91-kube-api-access-57x6z\") pod \"minio\" (UID: \"90c36189-2580-4339-924e-c17f5ebb5d91\") " pod="minio-dev/minio" Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.669118 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57x6z\" (UniqueName: \"kubernetes.io/projected/90c36189-2580-4339-924e-c17f5ebb5d91-kube-api-access-57x6z\") pod \"minio\" (UID: \"90c36189-2580-4339-924e-c17f5ebb5d91\") " pod="minio-dev/minio" Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.669242 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b2fc8b84-7edc-4ee6-bf46-d37e2718ba5e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b2fc8b84-7edc-4ee6-bf46-d37e2718ba5e\") pod \"minio\" (UID: \"90c36189-2580-4339-924e-c17f5ebb5d91\") " pod="minio-dev/minio" Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.676491 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.676552 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b2fc8b84-7edc-4ee6-bf46-d37e2718ba5e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b2fc8b84-7edc-4ee6-bf46-d37e2718ba5e\") pod \"minio\" (UID: \"90c36189-2580-4339-924e-c17f5ebb5d91\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/5bb10f01de42887c6e10b06c52cc58c95c93413b419f70a6c479aa8cd391accc/globalmount\"" pod="minio-dev/minio"
Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.690638 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57x6z\" (UniqueName: \"kubernetes.io/projected/90c36189-2580-4339-924e-c17f5ebb5d91-kube-api-access-57x6z\") pod \"minio\" (UID: \"90c36189-2580-4339-924e-c17f5ebb5d91\") " pod="minio-dev/minio"
Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.703335 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b2fc8b84-7edc-4ee6-bf46-d37e2718ba5e\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b2fc8b84-7edc-4ee6-bf46-d37e2718ba5e\") pod \"minio\" (UID: \"90c36189-2580-4339-924e-c17f5ebb5d91\") " pod="minio-dev/minio"
Dec 05 17:17:51 crc kubenswrapper[4753]: I1205 17:17:51.724650 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="minio-dev/minio"
Dec 05 17:17:52 crc kubenswrapper[4753]: I1205 17:17:52.009109 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["minio-dev/minio"]
Dec 05 17:17:52 crc kubenswrapper[4753]: I1205 17:17:52.539083 4753 generic.go:334] "Generic (PLEG): container finished" podID="940d564a-350c-4480-beae-dda46c53287a" containerID="0f3adcda95e55a0cbc83889bb696bd1509ded1a963fed2652a435cd571360908" exitCode=0
Dec 05 17:17:52 crc kubenswrapper[4753]: I1205 17:17:52.539334 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" event={"ID":"940d564a-350c-4480-beae-dda46c53287a","Type":"ContainerDied","Data":"0f3adcda95e55a0cbc83889bb696bd1509ded1a963fed2652a435cd571360908"}
Dec 05 17:17:52 crc kubenswrapper[4753]: I1205 17:17:52.541454 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"90c36189-2580-4339-924e-c17f5ebb5d91","Type":"ContainerStarted","Data":"c48bd03d569228bbea678ea318ba697942a6cac89ff941c19472be012fe0acd8"}
Dec 05 17:17:52 crc kubenswrapper[4753]: I1205 17:17:52.544656 4753 generic.go:334] "Generic (PLEG): container finished" podID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerID="c3503eb1183f79cd342a7c148de2bef742bca9af28502336e606db91e2cd152c" exitCode=0
Dec 05 17:17:52 crc kubenswrapper[4753]: I1205 17:17:52.544720 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7nrn" event={"ID":"9f941c89-6e05-43a8-8667-ac7f1c799b83","Type":"ContainerDied","Data":"c3503eb1183f79cd342a7c148de2bef742bca9af28502336e606db91e2cd152c"}
Dec 05 17:17:53 crc kubenswrapper[4753]: I1205 17:17:53.553634 4753 generic.go:334] "Generic (PLEG): container finished" podID="940d564a-350c-4480-beae-dda46c53287a" containerID="0d7d8b98fa5cc6ffb3eeef70c87bb444acef1296ca29325a990ea339329a6217" exitCode=0
Dec 05 17:17:53 crc kubenswrapper[4753]: I1205 17:17:53.553708 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" event={"ID":"940d564a-350c-4480-beae-dda46c53287a","Type":"ContainerDied","Data":"0d7d8b98fa5cc6ffb3eeef70c87bb444acef1296ca29325a990ea339329a6217"}
Dec 05 17:17:53 crc kubenswrapper[4753]: I1205 17:17:53.557780 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7nrn" event={"ID":"9f941c89-6e05-43a8-8667-ac7f1c799b83","Type":"ContainerStarted","Data":"44d2ff4ce439bada104d47409bbd0065623da617eaaf8fe6884dbc1787b11305"}
Dec 05 17:17:53 crc kubenswrapper[4753]: I1205 17:17:53.604951 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h7nrn" podStartSLOduration=2.207489816 podStartE2EDuration="4.604924502s" podCreationTimestamp="2025-12-05 17:17:49 +0000 UTC" firstStartedPulling="2025-12-05 17:17:50.521654471 +0000 UTC m=+809.024761477" lastFinishedPulling="2025-12-05 17:17:52.919089157 +0000 UTC m=+811.422196163" observedRunningTime="2025-12-05 17:17:53.600374881 +0000 UTC m=+812.103481887" watchObservedRunningTime="2025-12-05 17:17:53.604924502 +0000 UTC m=+812.108031508"
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.668333 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2"
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.837963 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-bundle\") pod \"940d564a-350c-4480-beae-dda46c53287a\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") "
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.838100 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-util\") pod \"940d564a-350c-4480-beae-dda46c53287a\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") "
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.838188 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lxdbx\" (UniqueName: \"kubernetes.io/projected/940d564a-350c-4480-beae-dda46c53287a-kube-api-access-lxdbx\") pod \"940d564a-350c-4480-beae-dda46c53287a\" (UID: \"940d564a-350c-4480-beae-dda46c53287a\") "
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.840430 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-bundle" (OuterVolumeSpecName: "bundle") pod "940d564a-350c-4480-beae-dda46c53287a" (UID: "940d564a-350c-4480-beae-dda46c53287a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.849548 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-util" (OuterVolumeSpecName: "util") pod "940d564a-350c-4480-beae-dda46c53287a" (UID: "940d564a-350c-4480-beae-dda46c53287a"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.863777 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/940d564a-350c-4480-beae-dda46c53287a-kube-api-access-lxdbx" (OuterVolumeSpecName: "kube-api-access-lxdbx") pod "940d564a-350c-4480-beae-dda46c53287a" (UID: "940d564a-350c-4480-beae-dda46c53287a"). InnerVolumeSpecName "kube-api-access-lxdbx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.939922 4753 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-util\") on node \"crc\" DevicePath \"\""
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.939981 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lxdbx\" (UniqueName: \"kubernetes.io/projected/940d564a-350c-4480-beae-dda46c53287a-kube-api-access-lxdbx\") on node \"crc\" DevicePath \"\""
Dec 05 17:17:55 crc kubenswrapper[4753]: I1205 17:17:55.939994 4753 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/940d564a-350c-4480-beae-dda46c53287a-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:17:56 crc kubenswrapper[4753]: I1205 17:17:56.592204 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2" event={"ID":"940d564a-350c-4480-beae-dda46c53287a","Type":"ContainerDied","Data":"1e1e075250252361e99fb20701247b1c8f710addc5f22855a15bb3210e3316b1"}
Dec 05 17:17:56 crc kubenswrapper[4753]: I1205 17:17:56.592510 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e1e075250252361e99fb20701247b1c8f710addc5f22855a15bb3210e3316b1"
Dec 05 17:17:56 crc kubenswrapper[4753]: I1205 17:17:56.592584 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2"
Dec 05 17:17:56 crc kubenswrapper[4753]: I1205 17:17:56.595370 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="minio-dev/minio" event={"ID":"90c36189-2580-4339-924e-c17f5ebb5d91","Type":"ContainerStarted","Data":"896f71d2204cde2fb13a30208ebcf19807896533218a44b729ae35c22c126242"}
Dec 05 17:17:56 crc kubenswrapper[4753]: I1205 17:17:56.618273 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="minio-dev/minio" podStartSLOduration=4.667736497 podStartE2EDuration="8.618243459s" podCreationTimestamp="2025-12-05 17:17:48 +0000 UTC" firstStartedPulling="2025-12-05 17:17:52.040028836 +0000 UTC m=+810.543135842" lastFinishedPulling="2025-12-05 17:17:55.990535798 +0000 UTC m=+814.493642804" observedRunningTime="2025-12-05 17:17:56.613200694 +0000 UTC m=+815.116307700" watchObservedRunningTime="2025-12-05 17:17:56.618243459 +0000 UTC m=+815.121350465"
Dec 05 17:17:58 crc kubenswrapper[4753]: I1205 17:17:58.979403 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:17:58 crc kubenswrapper[4753]: I1205 17:17:58.979473 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:17:59 crc kubenswrapper[4753]: I1205 17:17:59.667458 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h7nrn"
Dec 05 17:17:59 crc kubenswrapper[4753]: I1205 17:17:59.667833 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h7nrn"
Dec 05 17:17:59 crc kubenswrapper[4753]: I1205 17:17:59.715941 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h7nrn"
Dec 05 17:18:00 crc kubenswrapper[4753]: I1205 17:18:00.683280 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h7nrn"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.162460 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"]
Dec 05 17:18:01 crc kubenswrapper[4753]: E1205 17:18:01.163360 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="940d564a-350c-4480-beae-dda46c53287a" containerName="util"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.163389 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="940d564a-350c-4480-beae-dda46c53287a" containerName="util"
Dec 05 17:18:01 crc kubenswrapper[4753]: E1205 17:18:01.163414 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="940d564a-350c-4480-beae-dda46c53287a" containerName="extract"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.163423 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="940d564a-350c-4480-beae-dda46c53287a" containerName="extract"
Dec 05 17:18:01 crc kubenswrapper[4753]: E1205 17:18:01.163438 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="940d564a-350c-4480-beae-dda46c53287a" containerName="pull"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.163446 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="940d564a-350c-4480-beae-dda46c53287a" containerName="pull"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.163607 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="940d564a-350c-4480-beae-dda46c53287a" containerName="extract"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.164629 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.167934 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"kube-root-ca.crt"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.168328 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"loki-operator-manager-config"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.168713 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-service-cert"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.168850 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-metrics"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.169585 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators-redhat"/"loki-operator-controller-manager-dockercfg-d848h"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.170584 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators-redhat"/"openshift-service-ca.crt"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.189789 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"]
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.235885 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grx6b\" (UniqueName: \"kubernetes.io/projected/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-kube-api-access-grx6b\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.235975 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-webhook-cert\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.236002 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-apiservice-cert\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.236288 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-manager-config\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.236375 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.338342 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-apiservice-cert\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.338397 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-webhook-cert\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.338453 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-manager-config\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.338487 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.338552 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grx6b\" (UniqueName: \"kubernetes.io/projected/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-kube-api-access-grx6b\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.339840 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manager-config\" (UniqueName: \"kubernetes.io/configmap/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-manager-config\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.346164 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-apiservice-cert\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.350878 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-webhook-cert\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.356342 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"loki-operator-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-loki-operator-metrics-cert\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.368191 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grx6b\" (UniqueName: \"kubernetes.io/projected/2d74ebb5-4059-4fcd-beef-f9e7bd2731d4-kube-api-access-grx6b\") pod \"loki-operator-controller-manager-69686586d4-rttvr\" (UID: \"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4\") " pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.481349 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.751683 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"]
Dec 05 17:18:01 crc kubenswrapper[4753]: I1205 17:18:01.756919 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h7nrn"]
Dec 05 17:18:02 crc kubenswrapper[4753]: I1205 17:18:02.639083 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr" event={"ID":"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4","Type":"ContainerStarted","Data":"31b788392a50e76efa42093c09259b389dda0ab1b6f894f8606886c5b5b8276a"}
Dec 05 17:18:03 crc kubenswrapper[4753]: I1205 17:18:03.646774 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h7nrn" podUID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerName="registry-server" containerID="cri-o://44d2ff4ce439bada104d47409bbd0065623da617eaaf8fe6884dbc1787b11305" gracePeriod=2
Dec 05 17:18:04 crc kubenswrapper[4753]: I1205 17:18:04.656488 4753 generic.go:334] "Generic (PLEG): container finished" podID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerID="44d2ff4ce439bada104d47409bbd0065623da617eaaf8fe6884dbc1787b11305" exitCode=0
Dec 05 17:18:04 crc kubenswrapper[4753]: I1205 17:18:04.656561 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7nrn" event={"ID":"9f941c89-6e05-43a8-8667-ac7f1c799b83","Type":"ContainerDied","Data":"44d2ff4ce439bada104d47409bbd0065623da617eaaf8fe6884dbc1787b11305"}
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.412451 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h7nrn"
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.508718 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lwhxb\" (UniqueName: \"kubernetes.io/projected/9f941c89-6e05-43a8-8667-ac7f1c799b83-kube-api-access-lwhxb\") pod \"9f941c89-6e05-43a8-8667-ac7f1c799b83\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") "
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.508813 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-utilities\") pod \"9f941c89-6e05-43a8-8667-ac7f1c799b83\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") "
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.508846 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-catalog-content\") pod \"9f941c89-6e05-43a8-8667-ac7f1c799b83\" (UID: \"9f941c89-6e05-43a8-8667-ac7f1c799b83\") "
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.510225 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-utilities" (OuterVolumeSpecName: "utilities") pod "9f941c89-6e05-43a8-8667-ac7f1c799b83" (UID: "9f941c89-6e05-43a8-8667-ac7f1c799b83"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.519391 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f941c89-6e05-43a8-8667-ac7f1c799b83-kube-api-access-lwhxb" (OuterVolumeSpecName: "kube-api-access-lwhxb") pod "9f941c89-6e05-43a8-8667-ac7f1c799b83" (UID: "9f941c89-6e05-43a8-8667-ac7f1c799b83"). InnerVolumeSpecName "kube-api-access-lwhxb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.532314 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.532840 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lwhxb\" (UniqueName: \"kubernetes.io/projected/9f941c89-6e05-43a8-8667-ac7f1c799b83-kube-api-access-lwhxb\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.638707 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f941c89-6e05-43a8-8667-ac7f1c799b83" (UID: "9f941c89-6e05-43a8-8667-ac7f1c799b83"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.668170 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h7nrn" event={"ID":"9f941c89-6e05-43a8-8667-ac7f1c799b83","Type":"ContainerDied","Data":"bb4392296d0a81ad735c01fdbe3c1ba6814c6fec14be54c5d9a014ff2ab127c7"}
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.668380 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h7nrn"
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.668586 4753 scope.go:117] "RemoveContainer" containerID="44d2ff4ce439bada104d47409bbd0065623da617eaaf8fe6884dbc1787b11305"
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.700336 4753 scope.go:117] "RemoveContainer" containerID="c3503eb1183f79cd342a7c148de2bef742bca9af28502336e606db91e2cd152c"
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.702178 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h7nrn"]
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.707377 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h7nrn"]
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.726749 4753 scope.go:117] "RemoveContainer" containerID="4e10b91765340c3f095e2f1824b44f1d76068b6210cc9fd15b5a2c54484bae79"
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.738579 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f941c89-6e05-43a8-8667-ac7f1c799b83-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:05 crc kubenswrapper[4753]: I1205 17:18:05.740965 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f941c89-6e05-43a8-8667-ac7f1c799b83" path="/var/lib/kubelet/pods/9f941c89-6e05-43a8-8667-ac7f1c799b83/volumes"
Dec 05 17:18:09 crc kubenswrapper[4753]: I1205 17:18:09.701965 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr" event={"ID":"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4","Type":"ContainerStarted","Data":"ccd1ebd787d1739aafd54fd9217f8138396cbbcd04f444b0322304858b420bc6"}
Dec 05 17:18:15 crc kubenswrapper[4753]: I1205 17:18:15.774022 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr" event={"ID":"2d74ebb5-4059-4fcd-beef-f9e7bd2731d4","Type":"ContainerStarted","Data":"b611b7b7ad0a49e3b684557a725417539ba08567420590534bfd6a0cb530e27a"}
Dec 05 17:18:15 crc kubenswrapper[4753]: I1205 17:18:15.774654 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:15 crc kubenswrapper[4753]: I1205 17:18:15.778212 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr"
Dec 05 17:18:15 crc kubenswrapper[4753]: I1205 17:18:15.813454 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators-redhat/loki-operator-controller-manager-69686586d4-rttvr" podStartSLOduration=1.605718126 podStartE2EDuration="14.813424417s" podCreationTimestamp="2025-12-05 17:18:01 +0000 UTC" firstStartedPulling="2025-12-05 17:18:01.736331613 +0000 UTC m=+820.239438619" lastFinishedPulling="2025-12-05 17:18:14.944037874 +0000 UTC m=+833.447144910" observedRunningTime="2025-12-05 17:18:15.807592029 +0000 UTC m=+834.310699055" watchObservedRunningTime="2025-12-05 17:18:15.813424417 +0000 UTC m=+834.316531433"
Dec 05 17:18:28 crc kubenswrapper[4753]: I1205 17:18:28.979345 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:18:28 crc kubenswrapper[4753]: I1205 17:18:28.980334 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:18:28 crc kubenswrapper[4753]: I1205 17:18:28.980419 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68"
Dec 05 17:18:28 crc kubenswrapper[4753]: I1205 17:18:28.981565 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bba70de976ea341407c3eb9e1abd5171c31bb8762b60796e6f36abd1b897024d"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 17:18:28 crc kubenswrapper[4753]: I1205 17:18:28.981661 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://bba70de976ea341407c3eb9e1abd5171c31bb8762b60796e6f36abd1b897024d" gracePeriod=600
Dec 05 17:18:29 crc kubenswrapper[4753]: I1205 17:18:29.889726 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="bba70de976ea341407c3eb9e1abd5171c31bb8762b60796e6f36abd1b897024d" exitCode=0
Dec 05 17:18:29 crc kubenswrapper[4753]: I1205 17:18:29.889798 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"bba70de976ea341407c3eb9e1abd5171c31bb8762b60796e6f36abd1b897024d"}
Dec 05 17:18:29 crc kubenswrapper[4753]: I1205 17:18:29.890259 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"7ca9474d05ab84ff7ea8a657d534bd38e1cef3f72237d6a798a2a1e2c1383e43"}
Dec 05 17:18:29 crc kubenswrapper[4753]: I1205 17:18:29.890292 4753 scope.go:117] "RemoveContainer" containerID="048cd1f4edd965d0ae4cd5cc208b03c56a5a2a1c72e0809d8cf01232add1c614"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.633685 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-hn7ll"]
Dec 05 17:18:55 crc kubenswrapper[4753]: E1205 17:18:55.634630 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerName="registry-server"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.634647 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerName="registry-server"
Dec 05 17:18:55 crc kubenswrapper[4753]: E1205 17:18:55.634662 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerName="extract-utilities"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.634671 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerName="extract-utilities"
Dec 05 17:18:55 crc kubenswrapper[4753]: E1205 17:18:55.634685 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerName="extract-content"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.634694 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerName="extract-content"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.634837 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f941c89-6e05-43a8-8667-ac7f1c799b83" containerName="registry-server"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.635926 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.671965 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hn7ll"]
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.803873 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-catalog-content\") pod \"redhat-marketplace-hn7ll\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") " pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.803932 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv8b4\" (UniqueName: \"kubernetes.io/projected/426cb0ae-48a4-401b-9148-025568a01eaa-kube-api-access-mv8b4\") pod \"redhat-marketplace-hn7ll\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") " pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.804289 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-utilities\") pod \"redhat-marketplace-hn7ll\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") " pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.905546 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-utilities\") pod \"redhat-marketplace-hn7ll\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") " pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.905633 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-catalog-content\") pod \"redhat-marketplace-hn7ll\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") " pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.905657 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv8b4\" (UniqueName: \"kubernetes.io/projected/426cb0ae-48a4-401b-9148-025568a01eaa-kube-api-access-mv8b4\") pod \"redhat-marketplace-hn7ll\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") " pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.906296 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-utilities\") pod \"redhat-marketplace-hn7ll\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") " pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.906522 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-catalog-content\") pod \"redhat-marketplace-hn7ll\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") " pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.927439 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv8b4\" (UniqueName: \"kubernetes.io/projected/426cb0ae-48a4-401b-9148-025568a01eaa-kube-api-access-mv8b4\") pod \"redhat-marketplace-hn7ll\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") " pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:55 crc kubenswrapper[4753]: I1205 17:18:55.954716 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hn7ll"
Dec 05 17:18:56 crc kubenswrapper[4753]: I1205 17:18:56.237444 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-hn7ll"]
Dec 05 17:18:57 crc kubenswrapper[4753]: I1205 17:18:57.145905 4753 generic.go:334] "Generic (PLEG): container finished" podID="426cb0ae-48a4-401b-9148-025568a01eaa" containerID="289c3bfd74234230a82966710967751fa85d309e31367c437641dc61c8c56905" exitCode=0
Dec 05 17:18:57 crc kubenswrapper[4753]: I1205 17:18:57.146009 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hn7ll" event={"ID":"426cb0ae-48a4-401b-9148-025568a01eaa","Type":"ContainerDied","Data":"289c3bfd74234230a82966710967751fa85d309e31367c437641dc61c8c56905"}
Dec 05 17:18:57 crc kubenswrapper[4753]: I1205 17:18:57.146384 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hn7ll" event={"ID":"426cb0ae-48a4-401b-9148-025568a01eaa","Type":"ContainerStarted","Data":"b4cf5b9bd0ec2947d80dcc2f114d66c6163e69deca92375446dd7b216b8b2122"}
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.012032 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.020870 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/6c372a8d094fad7255d3bbeabb4914bd2356af7b203a2d2176be1c9210q5h8h"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.028009 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.032457 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/7b5aa1f5b38b68c96e281700110eb6f32773ca4b2682978fa6f2ffb2c16swx2"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.037326 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s6f9l"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.037724 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-s6f9l" podUID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerName="registry-server" containerID="cri-o://2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d" gracePeriod=30
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.050431 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hh2kd"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.050766 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hh2kd" podUID="b7552952-65cd-4686-89de-8cd96ba599f2" containerName="registry-server" containerID="cri-o://22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a" gracePeriod=30
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.060950 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rtl5"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.061383 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" podUID="7aeabdb9-1aef-44d2-85e5-c17fcb4290be" containerName="marketplace-operator" containerID="cri-o://5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c" gracePeriod=30
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.072248 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbnjm"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.072632 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-dbnjm" podUID="2a7a8437-e7be-454d-bc02-71af554d390b" containerName="registry-server" containerID="cri-o://a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f" gracePeriod=30
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.129473 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hn7ll"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.138735 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lk7z9"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.139443 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lk7z9" podUID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerName="registry-server" containerID="cri-o://3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d" gracePeriod=30
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.148033 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kr92j"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.151931 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.170139 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kr92j"]
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.243974 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/303b3266-3775-4fcb-aac9-432b1fefaedc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kr92j\" (UID: \"303b3266-3775-4fcb-aac9-432b1fefaedc\") " pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.244049 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7mbw\" (UniqueName: \"kubernetes.io/projected/303b3266-3775-4fcb-aac9-432b1fefaedc-kube-api-access-x7mbw\") pod \"marketplace-operator-79b997595-kr92j\" (UID: \"303b3266-3775-4fcb-aac9-432b1fefaedc\") " pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.244079 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/303b3266-3775-4fcb-aac9-432b1fefaedc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kr92j\" (UID: \"303b3266-3775-4fcb-aac9-432b1fefaedc\") " pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.346053 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/303b3266-3775-4fcb-aac9-432b1fefaedc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kr92j\" (UID: \"303b3266-3775-4fcb-aac9-432b1fefaedc\") " pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.346133 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7mbw\" (UniqueName: \"kubernetes.io/projected/303b3266-3775-4fcb-aac9-432b1fefaedc-kube-api-access-x7mbw\") pod \"marketplace-operator-79b997595-kr92j\" (UID: \"303b3266-3775-4fcb-aac9-432b1fefaedc\") " pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.346218 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/303b3266-3775-4fcb-aac9-432b1fefaedc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kr92j\" (UID: \"303b3266-3775-4fcb-aac9-432b1fefaedc\") " pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.348211 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/303b3266-3775-4fcb-aac9-432b1fefaedc-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kr92j\" (UID: \"303b3266-3775-4fcb-aac9-432b1fefaedc\") " pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.362674 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/303b3266-3775-4fcb-aac9-432b1fefaedc-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kr92j\" (UID: \"303b3266-3775-4fcb-aac9-432b1fefaedc\") " pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.367687 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7mbw\" (UniqueName: \"kubernetes.io/projected/303b3266-3775-4fcb-aac9-432b1fefaedc-kube-api-access-x7mbw\") pod \"marketplace-operator-79b997595-kr92j\" (UID: \"303b3266-3775-4fcb-aac9-432b1fefaedc\") " pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.611845 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kr92j"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.616927 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hh2kd"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.656711 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-s6f9l"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.672302 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.687233 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lk7z9"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.701875 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dbnjm"
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.756794 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-trusted-ca\") pod \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.756866 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-utilities\") pod \"b7552952-65cd-4686-89de-8cd96ba599f2\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.756900 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-utilities\") pod \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.756927 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbvm2\" (UniqueName: \"kubernetes.io/projected/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-kube-api-access-sbvm2\") pod \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.756954 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-catalog-content\") pod \"b7552952-65cd-4686-89de-8cd96ba599f2\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.756980 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-operator-metrics\") pod \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\" (UID: \"7aeabdb9-1aef-44d2-85e5-c17fcb4290be\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.757034 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9h8k6\" (UniqueName: \"kubernetes.io/projected/b7552952-65cd-4686-89de-8cd96ba599f2-kube-api-access-9h8k6\") pod \"b7552952-65cd-4686-89de-8cd96ba599f2\" (UID: \"b7552952-65cd-4686-89de-8cd96ba599f2\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.757085 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-catalog-content\") pod \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.757146 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nv58c\" (UniqueName: \"kubernetes.io/projected/2a300708-258b-4ba6-b7c1-c46b90c8ec36-kube-api-access-nv58c\") pod \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\" (UID: \"2a300708-258b-4ba6-b7c1-c46b90c8ec36\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.759068 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "7aeabdb9-1aef-44d2-85e5-c17fcb4290be" (UID: "7aeabdb9-1aef-44d2-85e5-c17fcb4290be"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.760040 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-utilities" (OuterVolumeSpecName: "utilities") pod "b7552952-65cd-4686-89de-8cd96ba599f2" (UID: "b7552952-65cd-4686-89de-8cd96ba599f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.761085 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-utilities" (OuterVolumeSpecName: "utilities") pod "2a300708-258b-4ba6-b7c1-c46b90c8ec36" (UID: "2a300708-258b-4ba6-b7c1-c46b90c8ec36"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.767280 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-kube-api-access-sbvm2" (OuterVolumeSpecName: "kube-api-access-sbvm2") pod "7aeabdb9-1aef-44d2-85e5-c17fcb4290be" (UID: "7aeabdb9-1aef-44d2-85e5-c17fcb4290be"). InnerVolumeSpecName "kube-api-access-sbvm2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.774571 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "7aeabdb9-1aef-44d2-85e5-c17fcb4290be" (UID: "7aeabdb9-1aef-44d2-85e5-c17fcb4290be"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.774713 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7552952-65cd-4686-89de-8cd96ba599f2-kube-api-access-9h8k6" (OuterVolumeSpecName: "kube-api-access-9h8k6") pod "b7552952-65cd-4686-89de-8cd96ba599f2" (UID: "b7552952-65cd-4686-89de-8cd96ba599f2"). InnerVolumeSpecName "kube-api-access-9h8k6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.786462 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a300708-258b-4ba6-b7c1-c46b90c8ec36-kube-api-access-nv58c" (OuterVolumeSpecName: "kube-api-access-nv58c") pod "2a300708-258b-4ba6-b7c1-c46b90c8ec36" (UID: "2a300708-258b-4ba6-b7c1-c46b90c8ec36"). InnerVolumeSpecName "kube-api-access-nv58c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.831080 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b7552952-65cd-4686-89de-8cd96ba599f2" (UID: "b7552952-65cd-4686-89de-8cd96ba599f2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.832205 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a300708-258b-4ba6-b7c1-c46b90c8ec36" (UID: "2a300708-258b-4ba6-b7c1-c46b90c8ec36"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.858135 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-catalog-content\") pod \"2a7a8437-e7be-454d-bc02-71af554d390b\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.858199 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68f7p\" (UniqueName: \"kubernetes.io/projected/f35bd233-6e5b-4099-8da5-2dda1519d793-kube-api-access-68f7p\") pod \"f35bd233-6e5b-4099-8da5-2dda1519d793\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.858239 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-catalog-content\") pod \"f35bd233-6e5b-4099-8da5-2dda1519d793\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.858286 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-utilities\") pod \"2a7a8437-e7be-454d-bc02-71af554d390b\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.858319 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ktnl\" (UniqueName: \"kubernetes.io/projected/2a7a8437-e7be-454d-bc02-71af554d390b-kube-api-access-2ktnl\") pod \"2a7a8437-e7be-454d-bc02-71af554d390b\" (UID: \"2a7a8437-e7be-454d-bc02-71af554d390b\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.858469 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-utilities\") pod \"f35bd233-6e5b-4099-8da5-2dda1519d793\" (UID: \"f35bd233-6e5b-4099-8da5-2dda1519d793\") "
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859067 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859087 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nv58c\" (UniqueName: \"kubernetes.io/projected/2a300708-258b-4ba6-b7c1-c46b90c8ec36-kube-api-access-nv58c\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859102 4753 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859116 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859126 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a300708-258b-4ba6-b7c1-c46b90c8ec36-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859137 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbvm2\" (UniqueName: \"kubernetes.io/projected/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-kube-api-access-sbvm2\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859152 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b7552952-65cd-4686-89de-8cd96ba599f2-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859165 4753 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/7aeabdb9-1aef-44d2-85e5-c17fcb4290be-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859195 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9h8k6\" (UniqueName: \"kubernetes.io/projected/b7552952-65cd-4686-89de-8cd96ba599f2-kube-api-access-9h8k6\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.859748 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-utilities" (OuterVolumeSpecName: "utilities") pod "f35bd233-6e5b-4099-8da5-2dda1519d793" (UID: "f35bd233-6e5b-4099-8da5-2dda1519d793"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.860727 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-utilities" (OuterVolumeSpecName: "utilities") pod "2a7a8437-e7be-454d-bc02-71af554d390b" (UID: "2a7a8437-e7be-454d-bc02-71af554d390b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.862594 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a7a8437-e7be-454d-bc02-71af554d390b-kube-api-access-2ktnl" (OuterVolumeSpecName: "kube-api-access-2ktnl") pod "2a7a8437-e7be-454d-bc02-71af554d390b" (UID: "2a7a8437-e7be-454d-bc02-71af554d390b"). InnerVolumeSpecName "kube-api-access-2ktnl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.867307 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f35bd233-6e5b-4099-8da5-2dda1519d793-kube-api-access-68f7p" (OuterVolumeSpecName: "kube-api-access-68f7p") pod "f35bd233-6e5b-4099-8da5-2dda1519d793" (UID: "f35bd233-6e5b-4099-8da5-2dda1519d793"). InnerVolumeSpecName "kube-api-access-68f7p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.892391 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a7a8437-e7be-454d-bc02-71af554d390b" (UID: "2a7a8437-e7be-454d-bc02-71af554d390b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.961109 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.961190 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68f7p\" (UniqueName: \"kubernetes.io/projected/f35bd233-6e5b-4099-8da5-2dda1519d793-kube-api-access-68f7p\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.961209 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a7a8437-e7be-454d-bc02-71af554d390b-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.961221 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ktnl\" (UniqueName: \"kubernetes.io/projected/2a7a8437-e7be-454d-bc02-71af554d390b-kube-api-access-2ktnl\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.961232 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:58 crc kubenswrapper[4753]: I1205 17:18:58.969877 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f35bd233-6e5b-4099-8da5-2dda1519d793" (UID: "f35bd233-6e5b-4099-8da5-2dda1519d793"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.062641 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f35bd233-6e5b-4099-8da5-2dda1519d793-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.069759 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kr92j"]
Dec 05 17:18:59 crc kubenswrapper[4753]: W1205 17:18:59.075909 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod303b3266_3775_4fcb_aac9_432b1fefaedc.slice/crio-b7aef6762b9172de52c01954b7eb03456a92b88ffe50934a41fc3a7cdd583d80 WatchSource:0}: Error finding container b7aef6762b9172de52c01954b7eb03456a92b88ffe50934a41fc3a7cdd583d80: Status 404 returned error can't find the container with id b7aef6762b9172de52c01954b7eb03456a92b88ffe50934a41fc3a7cdd583d80
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.175126 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbnjm" event={"ID":"2a7a8437-e7be-454d-bc02-71af554d390b","Type":"ContainerDied","Data":"a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f"}
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.175208 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-dbnjm"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.175698 4753 scope.go:117] "RemoveContainer" containerID="a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.175036 4753 generic.go:334] "Generic (PLEG): container finished" podID="2a7a8437-e7be-454d-bc02-71af554d390b" containerID="a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f" exitCode=0
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.175952 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-dbnjm" event={"ID":"2a7a8437-e7be-454d-bc02-71af554d390b","Type":"ContainerDied","Data":"d5b2a351a84c5947cb63e76f2d78082462ba26c8ad1c846706232dccbbe5e06e"}
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.191753 4753 generic.go:334] "Generic (PLEG): container finished" podID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerID="2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d" exitCode=0
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.191900 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f9l" event={"ID":"2a300708-258b-4ba6-b7c1-c46b90c8ec36","Type":"ContainerDied","Data":"2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d"}
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.191946 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-s6f9l" event={"ID":"2a300708-258b-4ba6-b7c1-c46b90c8ec36","Type":"ContainerDied","Data":"8199d3b42a5ccacb4ab68e7531591a2f4970b6e8bbe7155f401cfb63ee5749aa"}
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.192063 4753 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/certified-operators-s6f9l" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.205825 4753 generic.go:334] "Generic (PLEG): container finished" podID="426cb0ae-48a4-401b-9148-025568a01eaa" containerID="1567e2665a3359ebd4248a67da4b5719ac593b037bf0dd3f8f52cbc045539786" exitCode=0 Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.206120 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hn7ll" event={"ID":"426cb0ae-48a4-401b-9148-025568a01eaa","Type":"ContainerDied","Data":"1567e2665a3359ebd4248a67da4b5719ac593b037bf0dd3f8f52cbc045539786"} Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.254786 4753 generic.go:334] "Generic (PLEG): container finished" podID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerID="3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d" exitCode=0 Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.254891 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lk7z9" event={"ID":"f35bd233-6e5b-4099-8da5-2dda1519d793","Type":"ContainerDied","Data":"3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d"} Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.254936 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lk7z9" event={"ID":"f35bd233-6e5b-4099-8da5-2dda1519d793","Type":"ContainerDied","Data":"94b5329128ddaa36e1822648a2e8263f095c06559bd129082949d518ba28c002"} Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.255039 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lk7z9" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.279791 4753 generic.go:334] "Generic (PLEG): container finished" podID="7aeabdb9-1aef-44d2-85e5-c17fcb4290be" containerID="5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c" exitCode=0 Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.280028 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.280027 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" event={"ID":"7aeabdb9-1aef-44d2-85e5-c17fcb4290be","Type":"ContainerDied","Data":"5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c"} Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.280233 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-8rtl5" event={"ID":"7aeabdb9-1aef-44d2-85e5-c17fcb4290be","Type":"ContainerDied","Data":"fac12ea0b48abf0e0bca39820cd1c83a170d329d13d4f533756917881749a51a"} Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.285325 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kr92j" event={"ID":"303b3266-3775-4fcb-aac9-432b1fefaedc","Type":"ContainerStarted","Data":"b7aef6762b9172de52c01954b7eb03456a92b88ffe50934a41fc3a7cdd583d80"} Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.286009 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-kr92j" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.292354 4753 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-kr92j container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.52:8080/healthz\": dial tcp 10.217.0.52:8080: connect: connection refused" start-of-body= Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.292432 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-kr92j" podUID="303b3266-3775-4fcb-aac9-432b1fefaedc" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.52:8080/healthz\": dial tcp 10.217.0.52:8080: connect: connection refused" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.294571 4753 scope.go:117] "RemoveContainer" containerID="024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.305335 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-s6f9l"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.312345 4753 generic.go:334] "Generic (PLEG): container finished" podID="b7552952-65cd-4686-89de-8cd96ba599f2" containerID="22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a" exitCode=0 Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.312505 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hh2kd" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.312507 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hh2kd" event={"ID":"b7552952-65cd-4686-89de-8cd96ba599f2","Type":"ContainerDied","Data":"22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a"} Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.317495 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hh2kd" event={"ID":"b7552952-65cd-4686-89de-8cd96ba599f2","Type":"ContainerDied","Data":"62d8a860fe6acc3db40f4a9a3f7d1fa7bc8316198c28af546880f1fd57d9fbfe"} Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.317544 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-s6f9l"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.342297 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbnjm"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.344579 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-dbnjm"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.346642 4753 scope.go:117] "RemoveContainer" containerID="6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.372266 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lk7z9"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.377789 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lk7z9"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.391231 4753 scope.go:117] "RemoveContainer" containerID="a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.393892 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-kr92j" podStartSLOduration=1.393871431 podStartE2EDuration="1.393871431s" podCreationTimestamp="2025-12-05 17:18:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:18:59.392256365 +0000 UTC m=+877.895363371" watchObservedRunningTime="2025-12-05 17:18:59.393871431 +0000 UTC m=+877.896978437" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.394280 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f\": container with ID starting with a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f not found: ID does not exist" containerID="a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.394396 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f"} err="failed to get container status \"a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f\": rpc error: code = NotFound desc = could not find container \"a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f\": container with ID starting with a9d3865af6941b82209ba4c5de476f72c8f223245c848bde840b1276f8a8578f 
not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.394487 4753 scope.go:117] "RemoveContainer" containerID="024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.395335 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02\": container with ID starting with 024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02 not found: ID does not exist" containerID="024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.395425 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02"} err="failed to get container status \"024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02\": rpc error: code = NotFound desc = could not find container \"024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02\": container with ID starting with 024244304bc07028ae7f7b297c255c820f0750e6f555cd3059cc989353054d02 not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.395505 4753 scope.go:117] "RemoveContainer" containerID="6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.398151 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d\": container with ID starting with 6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d not found: ID does not exist" containerID="6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.398220 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d"} err="failed to get container status \"6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d\": rpc error: code = NotFound desc = could not find container \"6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d\": container with ID starting with 6b5d72e58ee0dd0ef97d7d2260a3fea81d98ad1a6dea58d8a6eaf37131fdb69d not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.398265 4753 scope.go:117] "RemoveContainer" containerID="2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.420567 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rtl5"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.430420 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-8rtl5"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.438362 4753 scope.go:117] "RemoveContainer" containerID="d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.443349 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hh2kd"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.445338 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/community-operators-hh2kd"] Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.472271 4753 scope.go:117] "RemoveContainer" containerID="6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.498306 4753 scope.go:117] "RemoveContainer" containerID="2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.499543 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d\": container with ID starting with 2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d not found: ID does not exist" containerID="2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.499606 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d"} err="failed to get container status \"2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d\": rpc error: code = NotFound desc = could not find container \"2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d\": container with ID starting with 2e3b27412ddd8a10cc4067e8f8da96e32b225773c7ac8787ecae7f5cb075dd3d not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.499648 4753 scope.go:117] "RemoveContainer" containerID="d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.500333 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427\": container with ID starting with d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427 not found: ID does not exist" containerID="d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.500393 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427"} err="failed to get container status \"d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427\": rpc error: code = NotFound desc = could not find container \"d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427\": container with ID starting with d079994f40eeb432b6f1ea8d30f94a06f09e5f4861c3ef72590911d5eb526427 not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.500431 4753 scope.go:117] "RemoveContainer" containerID="6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.500751 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f\": container with ID starting with 6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f not found: ID does not exist" containerID="6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.500780 4753 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f"} err="failed to get container status \"6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f\": rpc error: code = NotFound desc = could not find container \"6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f\": container with ID starting with 6dc4bf794f9038adcfc36777073865ea0fe8ae64a5b55618a89faf786504992f not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.500804 4753 scope.go:117] "RemoveContainer" containerID="3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.521552 4753 scope.go:117] "RemoveContainer" containerID="ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.556661 4753 scope.go:117] "RemoveContainer" containerID="dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.575556 4753 scope.go:117] "RemoveContainer" containerID="3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.576698 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d\": container with ID starting with 3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d not found: ID does not exist" containerID="3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.576741 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d"} err="failed to get container status \"3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d\": rpc error: code = NotFound desc = could not find container \"3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d\": container with ID starting with 3aec93846696e4790cb572c3a09bbc6fd6c845e2686ba965ad33853529730f9d not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.576764 4753 scope.go:117] "RemoveContainer" containerID="ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.577016 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf\": container with ID starting with ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf not found: ID does not exist" containerID="ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.577037 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf"} err="failed to get container status \"ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf\": rpc error: code = NotFound desc = could not find container \"ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf\": container with ID starting with ff005ed0c14b193f46ba141f0a2b6b3a6f144f304e1153b19636acb46e9f87bf not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.577055 4753 
scope.go:117] "RemoveContainer" containerID="dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.577326 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58\": container with ID starting with dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58 not found: ID does not exist" containerID="dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.577350 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58"} err="failed to get container status \"dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58\": rpc error: code = NotFound desc = could not find container \"dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58\": container with ID starting with dbb60b6e2de7b4e0908b536eab3b5c777e363874e86285e5b6f7f6a98aea7f58 not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.577363 4753 scope.go:117] "RemoveContainer" containerID="5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.609510 4753 scope.go:117] "RemoveContainer" containerID="5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.610407 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c\": container with ID starting with 5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c not found: ID does not exist" containerID="5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.610539 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c"} err="failed to get container status \"5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c\": rpc error: code = NotFound desc = could not find container \"5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c\": container with ID starting with 5dda004f3240e21696f38a0796f2340a1651e251747a0f94bba8e94e5dbb8a4c not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.610619 4753 scope.go:117] "RemoveContainer" containerID="22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.627784 4753 scope.go:117] "RemoveContainer" containerID="f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.650354 4753 scope.go:117] "RemoveContainer" containerID="08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.651522 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hn7ll" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.666284 4753 scope.go:117] "RemoveContainer" containerID="22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.669560 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a\": container with ID starting with 22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a not found: ID does not exist" containerID="22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.669688 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a"} err="failed to get container status \"22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a\": rpc error: code = NotFound desc = could not find container \"22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a\": container with ID starting with 22a898834ab748a70de92d420109c304a69655763abd1f51589b399d0a9b760a not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.669766 4753 scope.go:117] "RemoveContainer" containerID="f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.670267 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f\": container with ID starting with f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f not found: ID does not exist" containerID="f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.670327 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f"} err="failed to get container status \"f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f\": rpc error: code = NotFound desc = could not find container \"f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f\": container with ID starting with f380278697bb8f5b58161863e01879b9727dc70e112a1d4407d2179d7886ec9f not found: ID does not exist" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.670368 4753 scope.go:117] "RemoveContainer" containerID="08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d" Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.670700 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d\": container with ID starting with 08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d not found: ID does not exist" containerID="08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.670810 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d"} err="failed to get container status \"08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d\": rpc error: code = 
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.670810 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d"} err="failed to get container status \"08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d\": rpc error: code = NotFound desc = could not find container \"08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d\": container with ID starting with 08b10796122e202d602941ff908d7988da304bfc25841c2e32b17b4281c68c4d not found: ID does not exist"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.754366 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" path="/var/lib/kubelet/pods/2a300708-258b-4ba6-b7c1-c46b90c8ec36/volumes"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.755887 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a7a8437-e7be-454d-bc02-71af554d390b" path="/var/lib/kubelet/pods/2a7a8437-e7be-454d-bc02-71af554d390b/volumes"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.756762 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7aeabdb9-1aef-44d2-85e5-c17fcb4290be" path="/var/lib/kubelet/pods/7aeabdb9-1aef-44d2-85e5-c17fcb4290be/volumes"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.757902 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="940d564a-350c-4480-beae-dda46c53287a" path="/var/lib/kubelet/pods/940d564a-350c-4480-beae-dda46c53287a/volumes"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.759916 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7552952-65cd-4686-89de-8cd96ba599f2" path="/var/lib/kubelet/pods/b7552952-65cd-4686-89de-8cd96ba599f2/volumes"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.761506 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c" path="/var/lib/kubelet/pods/f2d3390d-b8d7-48c7-ab98-a4cfb3ebdf9c/volumes"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.762136 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f35bd233-6e5b-4099-8da5-2dda1519d793" path="/var/lib/kubelet/pods/f35bd233-6e5b-4099-8da5-2dda1519d793/volumes"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775353 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6ndfl"]
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775641 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775657 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775668 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7a8437-e7be-454d-bc02-71af554d390b" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775678 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7a8437-e7be-454d-bc02-71af554d390b" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775708 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="426cb0ae-48a4-401b-9148-025568a01eaa" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775715 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="426cb0ae-48a4-401b-9148-025568a01eaa" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775723 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7552952-65cd-4686-89de-8cd96ba599f2" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775729 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7552952-65cd-4686-89de-8cd96ba599f2" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775737 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775745 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775753 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775758 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775768 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7552952-65cd-4686-89de-8cd96ba599f2" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775795 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7552952-65cd-4686-89de-8cd96ba599f2" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775806 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7552952-65cd-4686-89de-8cd96ba599f2" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775812 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7552952-65cd-4686-89de-8cd96ba599f2" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775821 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775827 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775835 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775841 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775849 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7a8437-e7be-454d-bc02-71af554d390b" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775875 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7a8437-e7be-454d-bc02-71af554d390b" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775885 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775891 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775899 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="426cb0ae-48a4-401b-9148-025568a01eaa" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775906 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="426cb0ae-48a4-401b-9148-025568a01eaa" containerName="extract-utilities"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775915 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7aeabdb9-1aef-44d2-85e5-c17fcb4290be" containerName="marketplace-operator"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775921 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7aeabdb9-1aef-44d2-85e5-c17fcb4290be" containerName="marketplace-operator"
Dec 05 17:18:59 crc kubenswrapper[4753]: E1205 17:18:59.775930 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7a8437-e7be-454d-bc02-71af554d390b" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.775937 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7a8437-e7be-454d-bc02-71af554d390b" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.776334 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a300708-258b-4ba6-b7c1-c46b90c8ec36" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.776350 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7552952-65cd-4686-89de-8cd96ba599f2" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.776359 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="7aeabdb9-1aef-44d2-85e5-c17fcb4290be" containerName="marketplace-operator"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.776368 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="426cb0ae-48a4-401b-9148-025568a01eaa" containerName="extract-content"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.776399 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f35bd233-6e5b-4099-8da5-2dda1519d793" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.776408 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a7a8437-e7be-454d-bc02-71af554d390b" containerName="registry-server"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.777348 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6ndfl"
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.783876 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-catalog-content\") pod \"426cb0ae-48a4-401b-9148-025568a01eaa\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") "
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.783921 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-utilities\") pod \"426cb0ae-48a4-401b-9148-025568a01eaa\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") "
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.783969 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mv8b4\" (UniqueName: \"kubernetes.io/projected/426cb0ae-48a4-401b-9148-025568a01eaa-kube-api-access-mv8b4\") pod \"426cb0ae-48a4-401b-9148-025568a01eaa\" (UID: \"426cb0ae-48a4-401b-9148-025568a01eaa\") "
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.786186 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-utilities" (OuterVolumeSpecName: "utilities") pod "426cb0ae-48a4-401b-9148-025568a01eaa" (UID: "426cb0ae-48a4-401b-9148-025568a01eaa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.792372 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/426cb0ae-48a4-401b-9148-025568a01eaa-kube-api-access-mv8b4" (OuterVolumeSpecName: "kube-api-access-mv8b4") pod "426cb0ae-48a4-401b-9148-025568a01eaa" (UID: "426cb0ae-48a4-401b-9148-025568a01eaa"). InnerVolumeSpecName "kube-api-access-mv8b4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.794088 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6ndfl"]
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.885581 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwts5\" (UniqueName: \"kubernetes.io/projected/b29e1fdb-cc3c-412d-9194-8f40a860b5f2-kube-api-access-wwts5\") pod \"redhat-marketplace-6ndfl\" (UID: \"b29e1fdb-cc3c-412d-9194-8f40a860b5f2\") " pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.885647 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b29e1fdb-cc3c-412d-9194-8f40a860b5f2-utilities\") pod \"redhat-marketplace-6ndfl\" (UID: \"b29e1fdb-cc3c-412d-9194-8f40a860b5f2\") " pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.885745 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b29e1fdb-cc3c-412d-9194-8f40a860b5f2-catalog-content\") pod \"redhat-marketplace-6ndfl\" (UID: \"b29e1fdb-cc3c-412d-9194-8f40a860b5f2\") " pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.885834 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.885849 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/426cb0ae-48a4-401b-9148-025568a01eaa-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.885861 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mv8b4\" (UniqueName: \"kubernetes.io/projected/426cb0ae-48a4-401b-9148-025568a01eaa-kube-api-access-mv8b4\") on node \"crc\" DevicePath \"\"" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.987294 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwts5\" (UniqueName: \"kubernetes.io/projected/b29e1fdb-cc3c-412d-9194-8f40a860b5f2-kube-api-access-wwts5\") pod \"redhat-marketplace-6ndfl\" (UID: \"b29e1fdb-cc3c-412d-9194-8f40a860b5f2\") " pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.987401 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b29e1fdb-cc3c-412d-9194-8f40a860b5f2-utilities\") pod \"redhat-marketplace-6ndfl\" (UID: \"b29e1fdb-cc3c-412d-9194-8f40a860b5f2\") " pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.987456 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b29e1fdb-cc3c-412d-9194-8f40a860b5f2-catalog-content\") pod \"redhat-marketplace-6ndfl\" (UID: \"b29e1fdb-cc3c-412d-9194-8f40a860b5f2\") " pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.988188 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b29e1fdb-cc3c-412d-9194-8f40a860b5f2-utilities\") pod 
\"redhat-marketplace-6ndfl\" (UID: \"b29e1fdb-cc3c-412d-9194-8f40a860b5f2\") " pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:18:59 crc kubenswrapper[4753]: I1205 17:18:59.988216 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b29e1fdb-cc3c-412d-9194-8f40a860b5f2-catalog-content\") pod \"redhat-marketplace-6ndfl\" (UID: \"b29e1fdb-cc3c-412d-9194-8f40a860b5f2\") " pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.009227 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwts5\" (UniqueName: \"kubernetes.io/projected/b29e1fdb-cc3c-412d-9194-8f40a860b5f2-kube-api-access-wwts5\") pod \"redhat-marketplace-6ndfl\" (UID: \"b29e1fdb-cc3c-412d-9194-8f40a860b5f2\") " pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.093670 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.330573 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-hn7ll" event={"ID":"426cb0ae-48a4-401b-9148-025568a01eaa","Type":"ContainerDied","Data":"b4cf5b9bd0ec2947d80dcc2f114d66c6163e69deca92375446dd7b216b8b2122"} Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.330695 4753 scope.go:117] "RemoveContainer" containerID="1567e2665a3359ebd4248a67da4b5719ac593b037bf0dd3f8f52cbc045539786" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.330689 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-hn7ll" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.336099 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kr92j" event={"ID":"303b3266-3775-4fcb-aac9-432b1fefaedc","Type":"ContainerStarted","Data":"f491dd56f75407fa4eea64ee718cf9607df5f9d974c16f486321ca01b5ff4c0b"} Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.340183 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-kr92j" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.385591 4753 scope.go:117] "RemoveContainer" containerID="289c3bfd74234230a82966710967751fa85d309e31367c437641dc61c8c56905" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.397955 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-djnfb"] Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.402831 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-djnfb"] Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.403006 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.414860 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.441526 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-hn7ll"] Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.445578 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-hn7ll"] Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.504758 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl59c\" (UniqueName: \"kubernetes.io/projected/93a67264-59fa-4e32-868c-d4d308a5fed2-kube-api-access-pl59c\") pod \"certified-operators-djnfb\" (UID: \"93a67264-59fa-4e32-868c-d4d308a5fed2\") " pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.505323 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93a67264-59fa-4e32-868c-d4d308a5fed2-catalog-content\") pod \"certified-operators-djnfb\" (UID: \"93a67264-59fa-4e32-868c-d4d308a5fed2\") " pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.505353 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93a67264-59fa-4e32-868c-d4d308a5fed2-utilities\") pod \"certified-operators-djnfb\" (UID: \"93a67264-59fa-4e32-868c-d4d308a5fed2\") " pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.509331 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6ndfl"] Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.606911 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl59c\" (UniqueName: \"kubernetes.io/projected/93a67264-59fa-4e32-868c-d4d308a5fed2-kube-api-access-pl59c\") pod \"certified-operators-djnfb\" (UID: \"93a67264-59fa-4e32-868c-d4d308a5fed2\") " pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.606993 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93a67264-59fa-4e32-868c-d4d308a5fed2-catalog-content\") pod \"certified-operators-djnfb\" (UID: \"93a67264-59fa-4e32-868c-d4d308a5fed2\") " pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.607027 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93a67264-59fa-4e32-868c-d4d308a5fed2-utilities\") pod \"certified-operators-djnfb\" (UID: \"93a67264-59fa-4e32-868c-d4d308a5fed2\") " pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.607771 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/93a67264-59fa-4e32-868c-d4d308a5fed2-utilities\") pod \"certified-operators-djnfb\" (UID: \"93a67264-59fa-4e32-868c-d4d308a5fed2\") " 
pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.609623 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/93a67264-59fa-4e32-868c-d4d308a5fed2-catalog-content\") pod \"certified-operators-djnfb\" (UID: \"93a67264-59fa-4e32-868c-d4d308a5fed2\") " pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.629326 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl59c\" (UniqueName: \"kubernetes.io/projected/93a67264-59fa-4e32-868c-d4d308a5fed2-kube-api-access-pl59c\") pod \"certified-operators-djnfb\" (UID: \"93a67264-59fa-4e32-868c-d4d308a5fed2\") " pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.748295 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.778958 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t2mtx"] Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.781182 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.788540 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t2mtx"] Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.911990 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-catalog-content\") pod \"certified-operators-t2mtx\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") " pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.912608 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdb76\" (UniqueName: \"kubernetes.io/projected/c8552445-6e0f-49f6-920a-46a2b6604221-kube-api-access-jdb76\") pod \"certified-operators-t2mtx\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") " pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.912757 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-utilities\") pod \"certified-operators-t2mtx\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") " pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:00 crc kubenswrapper[4753]: I1205 17:19:00.980947 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-djnfb"] Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.013762 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-utilities\") pod \"certified-operators-t2mtx\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") " pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.013855 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-catalog-content\") pod \"certified-operators-t2mtx\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") " pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.013881 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdb76\" (UniqueName: \"kubernetes.io/projected/c8552445-6e0f-49f6-920a-46a2b6604221-kube-api-access-jdb76\") pod \"certified-operators-t2mtx\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") " pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.014465 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-utilities\") pod \"certified-operators-t2mtx\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") " pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.014529 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-catalog-content\") pod \"certified-operators-t2mtx\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") " pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.046293 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdb76\" (UniqueName: \"kubernetes.io/projected/c8552445-6e0f-49f6-920a-46a2b6604221-kube-api-access-jdb76\") pod \"certified-operators-t2mtx\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") " pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.098628 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.331259 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t2mtx"] Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.352510 4753 generic.go:334] "Generic (PLEG): container finished" podID="93a67264-59fa-4e32-868c-d4d308a5fed2" containerID="ffd28a3e6a3a02a17800406b18b5bfb28d47c5f84f5f5e7aa1d5e5d0f2672b4c" exitCode=0 Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.352606 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnfb" event={"ID":"93a67264-59fa-4e32-868c-d4d308a5fed2","Type":"ContainerDied","Data":"ffd28a3e6a3a02a17800406b18b5bfb28d47c5f84f5f5e7aa1d5e5d0f2672b4c"} Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.352669 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnfb" event={"ID":"93a67264-59fa-4e32-868c-d4d308a5fed2","Type":"ContainerStarted","Data":"4424c00987963d0487ab1741d3ce892c8cd3a8516f4d6e4760d3a6a650d701f6"} Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.354712 4753 generic.go:334] "Generic (PLEG): container finished" podID="b29e1fdb-cc3c-412d-9194-8f40a860b5f2" containerID="876436a38191f939e077a2a60dd14e33f59a23315ea598b30eb26ad0cd2b13ca" exitCode=0 Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.354816 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ndfl" event={"ID":"b29e1fdb-cc3c-412d-9194-8f40a860b5f2","Type":"ContainerDied","Data":"876436a38191f939e077a2a60dd14e33f59a23315ea598b30eb26ad0cd2b13ca"} Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.354867 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ndfl" event={"ID":"b29e1fdb-cc3c-412d-9194-8f40a860b5f2","Type":"ContainerStarted","Data":"2dc7a6c4b583e612e41f492403265f2a43ef714f82359799a9f3c8da6b369132"} Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.358699 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2mtx" event={"ID":"c8552445-6e0f-49f6-920a-46a2b6604221","Type":"ContainerStarted","Data":"5ecb9b81746c64e3cc94b7d46214e507fa26df09ee34c366f20237ed9db0ad59"} Dec 05 17:19:01 crc kubenswrapper[4753]: I1205 17:19:01.729055 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="426cb0ae-48a4-401b-9148-025568a01eaa" path="/var/lib/kubelet/pods/426cb0ae-48a4-401b-9148-025568a01eaa/volumes" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.174194 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dsvw6"] Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.176418 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.179886 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.187276 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dsvw6"] Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.230561 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40a8284b-f439-41a8-a064-9582c9d50ec4-utilities\") pod \"community-operators-dsvw6\" (UID: \"40a8284b-f439-41a8-a064-9582c9d50ec4\") " pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.230656 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rgcj\" (UniqueName: \"kubernetes.io/projected/40a8284b-f439-41a8-a064-9582c9d50ec4-kube-api-access-9rgcj\") pod \"community-operators-dsvw6\" (UID: \"40a8284b-f439-41a8-a064-9582c9d50ec4\") " pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.230989 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40a8284b-f439-41a8-a064-9582c9d50ec4-catalog-content\") pod \"community-operators-dsvw6\" (UID: \"40a8284b-f439-41a8-a064-9582c9d50ec4\") " pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.332786 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40a8284b-f439-41a8-a064-9582c9d50ec4-catalog-content\") pod \"community-operators-dsvw6\" (UID: \"40a8284b-f439-41a8-a064-9582c9d50ec4\") " pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.332903 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40a8284b-f439-41a8-a064-9582c9d50ec4-utilities\") pod \"community-operators-dsvw6\" (UID: \"40a8284b-f439-41a8-a064-9582c9d50ec4\") " pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.332937 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rgcj\" (UniqueName: \"kubernetes.io/projected/40a8284b-f439-41a8-a064-9582c9d50ec4-kube-api-access-9rgcj\") pod \"community-operators-dsvw6\" (UID: \"40a8284b-f439-41a8-a064-9582c9d50ec4\") " pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.333519 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/40a8284b-f439-41a8-a064-9582c9d50ec4-catalog-content\") pod \"community-operators-dsvw6\" (UID: \"40a8284b-f439-41a8-a064-9582c9d50ec4\") " pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.333554 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/40a8284b-f439-41a8-a064-9582c9d50ec4-utilities\") pod \"community-operators-dsvw6\" (UID: 
\"40a8284b-f439-41a8-a064-9582c9d50ec4\") " pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.359542 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rgcj\" (UniqueName: \"kubernetes.io/projected/40a8284b-f439-41a8-a064-9582c9d50ec4-kube-api-access-9rgcj\") pod \"community-operators-dsvw6\" (UID: \"40a8284b-f439-41a8-a064-9582c9d50ec4\") " pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.366073 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnfb" event={"ID":"93a67264-59fa-4e32-868c-d4d308a5fed2","Type":"ContainerStarted","Data":"b9ba1172e747e1d34caec8378fccecc9c4992e4a4a4f15ba1edcb8c6357c3cf1"} Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.368448 4753 generic.go:334] "Generic (PLEG): container finished" podID="b29e1fdb-cc3c-412d-9194-8f40a860b5f2" containerID="f428534f4b512bbdca2ce2ee45c1bb0dd5265518e573514bdca2e139730804a3" exitCode=0 Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.368493 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ndfl" event={"ID":"b29e1fdb-cc3c-412d-9194-8f40a860b5f2","Type":"ContainerDied","Data":"f428534f4b512bbdca2ce2ee45c1bb0dd5265518e573514bdca2e139730804a3"} Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.374135 4753 generic.go:334] "Generic (PLEG): container finished" podID="c8552445-6e0f-49f6-920a-46a2b6604221" containerID="474bab5623d1a26cafab9c336bdf13f5e3ad955c263c90ffdc3d26b118cdf5bb" exitCode=0 Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.375083 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2mtx" event={"ID":"c8552445-6e0f-49f6-920a-46a2b6604221","Type":"ContainerDied","Data":"474bab5623d1a26cafab9c336bdf13f5e3ad955c263c90ffdc3d26b118cdf5bb"} Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.506701 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.577134 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mvmjf"] Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.578856 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.595535 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mvmjf"] Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.642676 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-utilities\") pod \"community-operators-mvmjf\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") " pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.642735 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr2cw\" (UniqueName: \"kubernetes.io/projected/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-kube-api-access-tr2cw\") pod \"community-operators-mvmjf\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") " pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.642788 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-catalog-content\") pod \"community-operators-mvmjf\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") " pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.744323 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-catalog-content\") pod \"community-operators-mvmjf\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") " pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.744959 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-catalog-content\") pod \"community-operators-mvmjf\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") " pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.745139 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-utilities\") pod \"community-operators-mvmjf\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") " pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.745189 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr2cw\" (UniqueName: \"kubernetes.io/projected/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-kube-api-access-tr2cw\") pod \"community-operators-mvmjf\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") " pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.745796 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-utilities\") pod \"community-operators-mvmjf\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") " pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.766126 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tr2cw\" (UniqueName: \"kubernetes.io/projected/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-kube-api-access-tr2cw\") pod \"community-operators-mvmjf\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") " pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.920787 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:02 crc kubenswrapper[4753]: I1205 17:19:02.955585 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dsvw6"] Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.130488 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mvmjf"] Dec 05 17:19:03 crc kubenswrapper[4753]: W1205 17:19:03.148387 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8b0eef2_60df_47b7_bd8c_c0b10fe65f48.slice/crio-57cf83c52705a8301155019930288f9a451e197caf1e4c18fb014c972c7025d2 WatchSource:0}: Error finding container 57cf83c52705a8301155019930288f9a451e197caf1e4c18fb014c972c7025d2: Status 404 returned error can't find the container with id 57cf83c52705a8301155019930288f9a451e197caf1e4c18fb014c972c7025d2 Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.178972 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bm2xh"] Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.180475 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.185693 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.193460 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bm2xh"] Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.251508 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21af1eb9-08c2-4c3c-a3b3-e02577bd18a2-catalog-content\") pod \"redhat-operators-bm2xh\" (UID: \"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2\") " pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.251751 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21af1eb9-08c2-4c3c-a3b3-e02577bd18a2-utilities\") pod \"redhat-operators-bm2xh\" (UID: \"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2\") " pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.251782 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2965\" (UniqueName: \"kubernetes.io/projected/21af1eb9-08c2-4c3c-a3b3-e02577bd18a2-kube-api-access-h2965\") pod \"redhat-operators-bm2xh\" (UID: \"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2\") " pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.353907 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/21af1eb9-08c2-4c3c-a3b3-e02577bd18a2-catalog-content\") pod \"redhat-operators-bm2xh\" (UID: \"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2\") " pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.354178 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21af1eb9-08c2-4c3c-a3b3-e02577bd18a2-utilities\") pod \"redhat-operators-bm2xh\" (UID: \"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2\") " pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.354209 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2965\" (UniqueName: \"kubernetes.io/projected/21af1eb9-08c2-4c3c-a3b3-e02577bd18a2-kube-api-access-h2965\") pod \"redhat-operators-bm2xh\" (UID: \"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2\") " pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.355129 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21af1eb9-08c2-4c3c-a3b3-e02577bd18a2-catalog-content\") pod \"redhat-operators-bm2xh\" (UID: \"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2\") " pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.355313 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21af1eb9-08c2-4c3c-a3b3-e02577bd18a2-utilities\") pod \"redhat-operators-bm2xh\" (UID: \"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2\") " pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.388733 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2965\" (UniqueName: \"kubernetes.io/projected/21af1eb9-08c2-4c3c-a3b3-e02577bd18a2-kube-api-access-h2965\") pod \"redhat-operators-bm2xh\" (UID: \"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2\") " pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.396803 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2mtx" event={"ID":"c8552445-6e0f-49f6-920a-46a2b6604221","Type":"ContainerStarted","Data":"e7700ef8e5c16d7ae0df8ea3e9d491e556c07b086d37f7cdf590358082e9d4bb"} Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.403640 4753 generic.go:334] "Generic (PLEG): container finished" podID="40a8284b-f439-41a8-a064-9582c9d50ec4" containerID="7f5dda5940849997154dd5574b8e79116694d28822dd761471f09406bd94e3f3" exitCode=0 Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.403796 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsvw6" event={"ID":"40a8284b-f439-41a8-a064-9582c9d50ec4","Type":"ContainerDied","Data":"7f5dda5940849997154dd5574b8e79116694d28822dd761471f09406bd94e3f3"} Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.403843 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsvw6" event={"ID":"40a8284b-f439-41a8-a064-9582c9d50ec4","Type":"ContainerStarted","Data":"06b9499168f48c93143ba8775bc151c2ae7347fa92222953c69d20b98bebdffa"} Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.413262 4753 generic.go:334] "Generic (PLEG): container finished" podID="93a67264-59fa-4e32-868c-d4d308a5fed2" 
containerID="b9ba1172e747e1d34caec8378fccecc9c4992e4a4a4f15ba1edcb8c6357c3cf1" exitCode=0 Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.413407 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnfb" event={"ID":"93a67264-59fa-4e32-868c-d4d308a5fed2","Type":"ContainerDied","Data":"b9ba1172e747e1d34caec8378fccecc9c4992e4a4a4f15ba1edcb8c6357c3cf1"} Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.431282 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6ndfl" event={"ID":"b29e1fdb-cc3c-412d-9194-8f40a860b5f2","Type":"ContainerStarted","Data":"491d7dbfe9bd9e2fd64aa3ff11c3b01f47f1735761078b568caf825ef0922bd5"} Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.433598 4753 generic.go:334] "Generic (PLEG): container finished" podID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerID="efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c" exitCode=0 Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.433640 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvmjf" event={"ID":"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48","Type":"ContainerDied","Data":"efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c"} Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.433658 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvmjf" event={"ID":"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48","Type":"ContainerStarted","Data":"57cf83c52705a8301155019930288f9a451e197caf1e4c18fb014c972c7025d2"} Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.491076 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6ndfl" podStartSLOduration=3.054413957 podStartE2EDuration="4.491049616s" podCreationTimestamp="2025-12-05 17:18:59 +0000 UTC" firstStartedPulling="2025-12-05 17:19:01.36238517 +0000 UTC m=+879.865492176" lastFinishedPulling="2025-12-05 17:19:02.799020829 +0000 UTC m=+881.302127835" observedRunningTime="2025-12-05 17:19:03.488454502 +0000 UTC m=+881.991561518" watchObservedRunningTime="2025-12-05 17:19:03.491049616 +0000 UTC m=+881.994156612" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.580383 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:03 crc kubenswrapper[4753]: I1205 17:19:03.893533 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bm2xh"] Dec 05 17:19:04 crc kubenswrapper[4753]: I1205 17:19:04.443846 4753 generic.go:334] "Generic (PLEG): container finished" podID="21af1eb9-08c2-4c3c-a3b3-e02577bd18a2" containerID="5fb3fa3fd0ec7615cfcb6a2fbfc5c2c3f574129c82d449adee549a6a5eefa91c" exitCode=0 Dec 05 17:19:04 crc kubenswrapper[4753]: I1205 17:19:04.444264 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bm2xh" event={"ID":"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2","Type":"ContainerDied","Data":"5fb3fa3fd0ec7615cfcb6a2fbfc5c2c3f574129c82d449adee549a6a5eefa91c"} Dec 05 17:19:04 crc kubenswrapper[4753]: I1205 17:19:04.444485 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bm2xh" event={"ID":"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2","Type":"ContainerStarted","Data":"80ff4ffcd95f536281a0619a62e6d0989106e0640f21faf0ec7e5bf738227daa"} Dec 05 17:19:04 crc kubenswrapper[4753]: I1205 17:19:04.449461 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvmjf" event={"ID":"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48","Type":"ContainerStarted","Data":"bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308"} Dec 05 17:19:04 crc kubenswrapper[4753]: I1205 17:19:04.456431 4753 generic.go:334] "Generic (PLEG): container finished" podID="c8552445-6e0f-49f6-920a-46a2b6604221" containerID="e7700ef8e5c16d7ae0df8ea3e9d491e556c07b086d37f7cdf590358082e9d4bb" exitCode=0 Dec 05 17:19:04 crc kubenswrapper[4753]: I1205 17:19:04.456517 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2mtx" event={"ID":"c8552445-6e0f-49f6-920a-46a2b6604221","Type":"ContainerDied","Data":"e7700ef8e5c16d7ae0df8ea3e9d491e556c07b086d37f7cdf590358082e9d4bb"} Dec 05 17:19:04 crc kubenswrapper[4753]: I1205 17:19:04.461115 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsvw6" event={"ID":"40a8284b-f439-41a8-a064-9582c9d50ec4","Type":"ContainerStarted","Data":"8be76c799a2cd25b3ce2fba40c9db822b8a340b91ee0c95388f3c49be1a606f6"} Dec 05 17:19:04 crc kubenswrapper[4753]: I1205 17:19:04.468755 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djnfb" event={"ID":"93a67264-59fa-4e32-868c-d4d308a5fed2","Type":"ContainerStarted","Data":"5e915dc562c73a7bc10f1853bc85593c541bd1b68de55dc4268895b09b23fb99"} Dec 05 17:19:04 crc kubenswrapper[4753]: I1205 17:19:04.576839 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-djnfb" podStartSLOduration=2.038993977 podStartE2EDuration="4.576806769s" podCreationTimestamp="2025-12-05 17:19:00 +0000 UTC" firstStartedPulling="2025-12-05 17:19:01.362258826 +0000 UTC m=+879.865365832" lastFinishedPulling="2025-12-05 17:19:03.900071608 +0000 UTC m=+882.403178624" observedRunningTime="2025-12-05 17:19:04.568920855 +0000 UTC m=+883.072027861" watchObservedRunningTime="2025-12-05 17:19:04.576806769 +0000 UTC m=+883.079913775" Dec 05 17:19:05 crc kubenswrapper[4753]: I1205 17:19:05.476660 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bm2xh" 
event={"ID":"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2","Type":"ContainerStarted","Data":"e462be4ac109a10095ef87f5ac07c35d81685b60e5cb85d5e5851ce25e424791"} Dec 05 17:19:05 crc kubenswrapper[4753]: I1205 17:19:05.479369 4753 generic.go:334] "Generic (PLEG): container finished" podID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerID="bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308" exitCode=0 Dec 05 17:19:05 crc kubenswrapper[4753]: I1205 17:19:05.479458 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvmjf" event={"ID":"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48","Type":"ContainerDied","Data":"bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308"} Dec 05 17:19:05 crc kubenswrapper[4753]: I1205 17:19:05.482488 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2mtx" event={"ID":"c8552445-6e0f-49f6-920a-46a2b6604221","Type":"ContainerStarted","Data":"deb67d4a9bf6ee92a93061124c8e223f58b7d71f7fd33ffb55dbf4044034aa7d"} Dec 05 17:19:05 crc kubenswrapper[4753]: I1205 17:19:05.485941 4753 generic.go:334] "Generic (PLEG): container finished" podID="40a8284b-f439-41a8-a064-9582c9d50ec4" containerID="8be76c799a2cd25b3ce2fba40c9db822b8a340b91ee0c95388f3c49be1a606f6" exitCode=0 Dec 05 17:19:05 crc kubenswrapper[4753]: I1205 17:19:05.486064 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsvw6" event={"ID":"40a8284b-f439-41a8-a064-9582c9d50ec4","Type":"ContainerDied","Data":"8be76c799a2cd25b3ce2fba40c9db822b8a340b91ee0c95388f3c49be1a606f6"} Dec 05 17:19:05 crc kubenswrapper[4753]: I1205 17:19:05.486207 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dsvw6" event={"ID":"40a8284b-f439-41a8-a064-9582c9d50ec4","Type":"ContainerStarted","Data":"96d952c44a756fe595943652538fcf7206cc5ba2407592401586f7111522ba08"} Dec 05 17:19:05 crc kubenswrapper[4753]: I1205 17:19:05.558994 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t2mtx" podStartSLOduration=3.089181029 podStartE2EDuration="5.558962944s" podCreationTimestamp="2025-12-05 17:19:00 +0000 UTC" firstStartedPulling="2025-12-05 17:19:02.375727712 +0000 UTC m=+880.878834718" lastFinishedPulling="2025-12-05 17:19:04.845509627 +0000 UTC m=+883.348616633" observedRunningTime="2025-12-05 17:19:05.557099581 +0000 UTC m=+884.060206587" watchObservedRunningTime="2025-12-05 17:19:05.558962944 +0000 UTC m=+884.062069960" Dec 05 17:19:05 crc kubenswrapper[4753]: I1205 17:19:05.593710 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dsvw6" podStartSLOduration=2.180584912 podStartE2EDuration="3.593688312s" podCreationTimestamp="2025-12-05 17:19:02 +0000 UTC" firstStartedPulling="2025-12-05 17:19:03.407775566 +0000 UTC m=+881.910882572" lastFinishedPulling="2025-12-05 17:19:04.820878966 +0000 UTC m=+883.323985972" observedRunningTime="2025-12-05 17:19:05.58905162 +0000 UTC m=+884.092158646" watchObservedRunningTime="2025-12-05 17:19:05.593688312 +0000 UTC m=+884.096795318" Dec 05 17:19:06 crc kubenswrapper[4753]: I1205 17:19:06.495804 4753 generic.go:334] "Generic (PLEG): container finished" podID="21af1eb9-08c2-4c3c-a3b3-e02577bd18a2" containerID="e462be4ac109a10095ef87f5ac07c35d81685b60e5cb85d5e5851ce25e424791" exitCode=0 Dec 05 17:19:06 crc kubenswrapper[4753]: I1205 17:19:06.496255 4753 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bm2xh" event={"ID":"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2","Type":"ContainerDied","Data":"e462be4ac109a10095ef87f5ac07c35d81685b60e5cb85d5e5851ce25e424791"} Dec 05 17:19:06 crc kubenswrapper[4753]: I1205 17:19:06.503661 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvmjf" event={"ID":"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48","Type":"ContainerStarted","Data":"d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd"} Dec 05 17:19:06 crc kubenswrapper[4753]: I1205 17:19:06.553137 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mvmjf" podStartSLOduration=2.133200203 podStartE2EDuration="4.553117559s" podCreationTimestamp="2025-12-05 17:19:02 +0000 UTC" firstStartedPulling="2025-12-05 17:19:03.436931616 +0000 UTC m=+881.940038632" lastFinishedPulling="2025-12-05 17:19:05.856848972 +0000 UTC m=+884.359955988" observedRunningTime="2025-12-05 17:19:06.547049586 +0000 UTC m=+885.050156592" watchObservedRunningTime="2025-12-05 17:19:06.553117559 +0000 UTC m=+885.056224555" Dec 05 17:19:07 crc kubenswrapper[4753]: I1205 17:19:07.519371 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bm2xh" event={"ID":"21af1eb9-08c2-4c3c-a3b3-e02577bd18a2","Type":"ContainerStarted","Data":"a0e417cafcd9f83868ada00024654effdc8e474d0e5095beb784c7dd77ec6db9"} Dec 05 17:19:07 crc kubenswrapper[4753]: I1205 17:19:07.566346 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bm2xh" podStartSLOduration=2.102295606 podStartE2EDuration="4.566312907s" podCreationTimestamp="2025-12-05 17:19:03 +0000 UTC" firstStartedPulling="2025-12-05 17:19:04.446591943 +0000 UTC m=+882.949698949" lastFinishedPulling="2025-12-05 17:19:06.910609244 +0000 UTC m=+885.413716250" observedRunningTime="2025-12-05 17:19:07.560281685 +0000 UTC m=+886.063388711" watchObservedRunningTime="2025-12-05 17:19:07.566312907 +0000 UTC m=+886.069419913" Dec 05 17:19:10 crc kubenswrapper[4753]: I1205 17:19:10.094521 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:19:10 crc kubenswrapper[4753]: I1205 17:19:10.095032 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:19:10 crc kubenswrapper[4753]: I1205 17:19:10.150459 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:19:10 crc kubenswrapper[4753]: I1205 17:19:10.590579 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6ndfl" Dec 05 17:19:10 crc kubenswrapper[4753]: I1205 17:19:10.748992 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:10 crc kubenswrapper[4753]: I1205 17:19:10.749075 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:10 crc kubenswrapper[4753]: I1205 17:19:10.795839 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:11 crc kubenswrapper[4753]: I1205 17:19:11.099583 4753 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:11 crc kubenswrapper[4753]: I1205 17:19:11.099687 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:11 crc kubenswrapper[4753]: I1205 17:19:11.152717 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:11 crc kubenswrapper[4753]: I1205 17:19:11.596829 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:11 crc kubenswrapper[4753]: I1205 17:19:11.604045 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-djnfb" Dec 05 17:19:12 crc kubenswrapper[4753]: I1205 17:19:12.506947 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:12 crc kubenswrapper[4753]: I1205 17:19:12.507021 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:12 crc kubenswrapper[4753]: I1205 17:19:12.549693 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:12 crc kubenswrapper[4753]: I1205 17:19:12.600084 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dsvw6" Dec 05 17:19:12 crc kubenswrapper[4753]: I1205 17:19:12.921754 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:12 crc kubenswrapper[4753]: I1205 17:19:12.921846 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:12 crc kubenswrapper[4753]: I1205 17:19:12.975802 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:13 crc kubenswrapper[4753]: I1205 17:19:13.573024 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t2mtx"] Dec 05 17:19:13 crc kubenswrapper[4753]: I1205 17:19:13.574003 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t2mtx" podUID="c8552445-6e0f-49f6-920a-46a2b6604221" containerName="registry-server" containerID="cri-o://deb67d4a9bf6ee92a93061124c8e223f58b7d71f7fd33ffb55dbf4044034aa7d" gracePeriod=2 Dec 05 17:19:13 crc kubenswrapper[4753]: I1205 17:19:13.581775 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:13 crc kubenswrapper[4753]: I1205 17:19:13.581815 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:13 crc kubenswrapper[4753]: I1205 17:19:13.611698 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mvmjf" Dec 05 17:19:13 crc kubenswrapper[4753]: I1205 17:19:13.629638 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bm2xh" Dec 05 17:19:13 crc kubenswrapper[4753]: 
Dec 05 17:19:14 crc kubenswrapper[4753]: I1205 17:19:14.646788 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bm2xh"
Dec 05 17:19:15 crc kubenswrapper[4753]: I1205 17:19:15.964381 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mvmjf"]
Dec 05 17:19:15 crc kubenswrapper[4753]: I1205 17:19:15.965043 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mvmjf" podUID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerName="registry-server" containerID="cri-o://d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd" gracePeriod=2
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.484775 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvmjf"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.505780 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-utilities\") pod \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") "
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.505900 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tr2cw\" (UniqueName: \"kubernetes.io/projected/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-kube-api-access-tr2cw\") pod \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") "
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.505995 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-catalog-content\") pod \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\" (UID: \"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48\") "
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.506623 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-utilities" (OuterVolumeSpecName: "utilities") pod "b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" (UID: "b8b0eef2-60df-47b7-bd8c-c0b10fe65f48"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.512899 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-kube-api-access-tr2cw" (OuterVolumeSpecName: "kube-api-access-tr2cw") pod "b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" (UID: "b8b0eef2-60df-47b7-bd8c-c0b10fe65f48"). InnerVolumeSpecName "kube-api-access-tr2cw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.569786 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" (UID: "b8b0eef2-60df-47b7-bd8c-c0b10fe65f48"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.600419 4753 generic.go:334] "Generic (PLEG): container finished" podID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerID="d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd" exitCode=0
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.600546 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvmjf" event={"ID":"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48","Type":"ContainerDied","Data":"d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd"}
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.600596 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mvmjf" event={"ID":"b8b0eef2-60df-47b7-bd8c-c0b10fe65f48","Type":"ContainerDied","Data":"57cf83c52705a8301155019930288f9a451e197caf1e4c18fb014c972c7025d2"}
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.600622 4753 scope.go:117] "RemoveContainer" containerID="d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.600810 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mvmjf"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.607499 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.607532 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tr2cw\" (UniqueName: \"kubernetes.io/projected/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-kube-api-access-tr2cw\") on node \"crc\" DevicePath \"\""
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.607547 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.622089 4753 generic.go:334] "Generic (PLEG): container finished" podID="c8552445-6e0f-49f6-920a-46a2b6604221" containerID="deb67d4a9bf6ee92a93061124c8e223f58b7d71f7fd33ffb55dbf4044034aa7d" exitCode=0
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.622188 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2mtx" event={"ID":"c8552445-6e0f-49f6-920a-46a2b6604221","Type":"ContainerDied","Data":"deb67d4a9bf6ee92a93061124c8e223f58b7d71f7fd33ffb55dbf4044034aa7d"}
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.639971 4753 scope.go:117] "RemoveContainer" containerID="bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.647306 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mvmjf"]
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.655077 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mvmjf"]
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.705121 4753 scope.go:117] "RemoveContainer" containerID="efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.734641 4753 scope.go:117] "RemoveContainer" containerID="d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd"
Dec 05 17:19:17 crc kubenswrapper[4753]: E1205 17:19:17.736778 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd\": container with ID starting with d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd not found: ID does not exist" containerID="d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.736810 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd"} err="failed to get container status \"d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd\": rpc error: code = NotFound desc = could not find container \"d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd\": container with ID starting with d032f0270e5b66c231f2f85bdcb58349e8f440e8ab0320e5dc7d449e2b0931dd not found: ID does not exist"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.736841 4753 scope.go:117] "RemoveContainer" containerID="bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308"
Dec 05 17:19:17 crc kubenswrapper[4753]: E1205 17:19:17.737568 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308\": container with ID starting with bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308 not found: ID does not exist" containerID="bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.737595 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308"} err="failed to get container status \"bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308\": rpc error: code = NotFound desc = could not find container \"bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308\": container with ID starting with bcbff47c4b57dbaa8ff409f545ae3cbcfe7811e200fb0e6c75cd563e42c53308 not found: ID does not exist"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.737613 4753 scope.go:117] "RemoveContainer" containerID="efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c"
Dec 05 17:19:17 crc kubenswrapper[4753]: E1205 17:19:17.738429 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c\": container with ID starting with efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c not found: ID does not exist" containerID="efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.738448 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c"} err="failed to get container status \"efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c\": rpc error: code = NotFound desc = could not find container \"efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c\": container with ID starting with efdfacd615f8e0ca62937bd76647d1eb1c6fe252d816bfc37fd2dd0b4a9da32c not found: ID does not exist"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.747919 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" path="/var/lib/kubelet/pods/b8b0eef2-60df-47b7-bd8c-c0b10fe65f48/volumes"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.831782 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t2mtx"
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.926599 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdb76\" (UniqueName: \"kubernetes.io/projected/c8552445-6e0f-49f6-920a-46a2b6604221-kube-api-access-jdb76\") pod \"c8552445-6e0f-49f6-920a-46a2b6604221\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") "
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.927265 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-catalog-content\") pod \"c8552445-6e0f-49f6-920a-46a2b6604221\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") "
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.927667 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-utilities\") pod \"c8552445-6e0f-49f6-920a-46a2b6604221\" (UID: \"c8552445-6e0f-49f6-920a-46a2b6604221\") "
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.929308 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-utilities" (OuterVolumeSpecName: "utilities") pod "c8552445-6e0f-49f6-920a-46a2b6604221" (UID: "c8552445-6e0f-49f6-920a-46a2b6604221"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.932442 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8552445-6e0f-49f6-920a-46a2b6604221-kube-api-access-jdb76" (OuterVolumeSpecName: "kube-api-access-jdb76") pod "c8552445-6e0f-49f6-920a-46a2b6604221" (UID: "c8552445-6e0f-49f6-920a-46a2b6604221"). InnerVolumeSpecName "kube-api-access-jdb76". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:19:17 crc kubenswrapper[4753]: I1205 17:19:17.984973 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c8552445-6e0f-49f6-920a-46a2b6604221" (UID: "c8552445-6e0f-49f6-920a-46a2b6604221"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.030773 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.030842 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdb76\" (UniqueName: \"kubernetes.io/projected/c8552445-6e0f-49f6-920a-46a2b6604221-kube-api-access-jdb76\") on node \"crc\" DevicePath \"\"" Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.030861 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8552445-6e0f-49f6-920a-46a2b6604221-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.636142 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t2mtx" event={"ID":"c8552445-6e0f-49f6-920a-46a2b6604221","Type":"ContainerDied","Data":"5ecb9b81746c64e3cc94b7d46214e507fa26df09ee34c366f20237ed9db0ad59"} Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.636253 4753 scope.go:117] "RemoveContainer" containerID="deb67d4a9bf6ee92a93061124c8e223f58b7d71f7fd33ffb55dbf4044034aa7d" Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.636293 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t2mtx" Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.675781 4753 scope.go:117] "RemoveContainer" containerID="e7700ef8e5c16d7ae0df8ea3e9d491e556c07b086d37f7cdf590358082e9d4bb" Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.683689 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t2mtx"] Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.689993 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t2mtx"] Dec 05 17:19:18 crc kubenswrapper[4753]: I1205 17:19:18.702780 4753 scope.go:117] "RemoveContainer" containerID="474bab5623d1a26cafab9c336bdf13f5e3ad955c263c90ffdc3d26b118cdf5bb" Dec 05 17:19:19 crc kubenswrapper[4753]: I1205 17:19:19.728107 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8552445-6e0f-49f6-920a-46a2b6604221" path="/var/lib/kubelet/pods/c8552445-6e0f-49f6-920a-46a2b6604221/volumes" Dec 05 17:20:58 crc kubenswrapper[4753]: I1205 17:20:58.979647 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:20:58 crc kubenswrapper[4753]: I1205 17:20:58.980874 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:21:28 crc kubenswrapper[4753]: I1205 17:21:28.979897 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:21:28 crc kubenswrapper[4753]: I1205 17:21:28.981025 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:21:58 crc kubenswrapper[4753]: I1205 17:21:58.979772 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:21:58 crc kubenswrapper[4753]: I1205 17:21:58.980790 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:21:58 crc kubenswrapper[4753]: I1205 17:21:58.980874 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:21:58 crc kubenswrapper[4753]: I1205 17:21:58.981988 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7ca9474d05ab84ff7ea8a657d534bd38e1cef3f72237d6a798a2a1e2c1383e43"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:21:58 crc kubenswrapper[4753]: I1205 17:21:58.982092 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://7ca9474d05ab84ff7ea8a657d534bd38e1cef3f72237d6a798a2a1e2c1383e43" gracePeriod=600 Dec 05 17:22:00 crc kubenswrapper[4753]: I1205 17:22:00.071180 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="7ca9474d05ab84ff7ea8a657d534bd38e1cef3f72237d6a798a2a1e2c1383e43" exitCode=0 Dec 05 17:22:00 crc kubenswrapper[4753]: I1205 17:22:00.071306 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"7ca9474d05ab84ff7ea8a657d534bd38e1cef3f72237d6a798a2a1e2c1383e43"} Dec 05 17:22:00 crc kubenswrapper[4753]: I1205 17:22:00.072332 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"2170ea733cb4ab2e379e8299ee324bec350d8db90db1f1068d3bbeda7a75cd94"} Dec 05 17:22:00 crc kubenswrapper[4753]: I1205 17:22:00.072424 4753 scope.go:117] "RemoveContainer" containerID="bba70de976ea341407c3eb9e1abd5171c31bb8762b60796e6f36abd1b897024d" Dec 05 17:22:22 crc kubenswrapper[4753]: I1205 17:22:22.312626 4753 scope.go:117] "RemoveContainer" containerID="89d617488881ea551da8d5f7b1b4de779fae8d85edebd057f128f25d066bfd46" Dec 05 17:22:22 crc 
kubenswrapper[4753]: I1205 17:22:22.342348 4753 scope.go:117] "RemoveContainer" containerID="77aa4db91895a2e7731a54a5e02f0fc9756c6fb22930186082e8b1366705dcad" Dec 05 17:22:22 crc kubenswrapper[4753]: I1205 17:22:22.369166 4753 scope.go:117] "RemoveContainer" containerID="fa2cf6674356bbcb1673ccaf20c6b9a617d3bbe5d494397fbe44db924b7e09b3" Dec 05 17:24:22 crc kubenswrapper[4753]: I1205 17:24:22.429401 4753 scope.go:117] "RemoveContainer" containerID="0d7d8b98fa5cc6ffb3eeef70c87bb444acef1296ca29325a990ea339329a6217" Dec 05 17:24:22 crc kubenswrapper[4753]: I1205 17:24:22.453815 4753 scope.go:117] "RemoveContainer" containerID="bb13c9fda3a9d693ffa8175ec1eb90425c15e91a6a964b192b86b16e3d6c5361" Dec 05 17:24:22 crc kubenswrapper[4753]: I1205 17:24:22.482194 4753 scope.go:117] "RemoveContainer" containerID="0f3adcda95e55a0cbc83889bb696bd1509ded1a963fed2652a435cd571360908" Dec 05 17:24:28 crc kubenswrapper[4753]: I1205 17:24:28.978999 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:24:28 crc kubenswrapper[4753]: I1205 17:24:28.979734 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:24:58 crc kubenswrapper[4753]: I1205 17:24:58.979857 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:24:58 crc kubenswrapper[4753]: I1205 17:24:58.980857 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:25:28 crc kubenswrapper[4753]: I1205 17:25:28.979350 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:25:28 crc kubenswrapper[4753]: I1205 17:25:28.980515 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:25:28 crc kubenswrapper[4753]: I1205 17:25:28.980629 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:25:28 crc kubenswrapper[4753]: I1205 17:25:28.982063 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"2170ea733cb4ab2e379e8299ee324bec350d8db90db1f1068d3bbeda7a75cd94"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:25:28 crc kubenswrapper[4753]: I1205 17:25:28.982242 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://2170ea733cb4ab2e379e8299ee324bec350d8db90db1f1068d3bbeda7a75cd94" gracePeriod=600 Dec 05 17:25:29 crc kubenswrapper[4753]: I1205 17:25:29.867841 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="2170ea733cb4ab2e379e8299ee324bec350d8db90db1f1068d3bbeda7a75cd94" exitCode=0 Dec 05 17:25:29 crc kubenswrapper[4753]: I1205 17:25:29.867927 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"2170ea733cb4ab2e379e8299ee324bec350d8db90db1f1068d3bbeda7a75cd94"} Dec 05 17:25:29 crc kubenswrapper[4753]: I1205 17:25:29.868590 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"e5f48dac911921ce787e6e9fea9709c824d57dffe736123659ddb77fb75ed48a"} Dec 05 17:25:29 crc kubenswrapper[4753]: I1205 17:25:29.868643 4753 scope.go:117] "RemoveContainer" containerID="7ca9474d05ab84ff7ea8a657d534bd38e1cef3f72237d6a798a2a1e2c1383e43" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.726558 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj"] Dec 05 17:27:22 crc kubenswrapper[4753]: E1205 17:27:22.727772 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerName="extract-content" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.727790 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerName="extract-content" Dec 05 17:27:22 crc kubenswrapper[4753]: E1205 17:27:22.727806 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8552445-6e0f-49f6-920a-46a2b6604221" containerName="registry-server" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.727815 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8552445-6e0f-49f6-920a-46a2b6604221" containerName="registry-server" Dec 05 17:27:22 crc kubenswrapper[4753]: E1205 17:27:22.727830 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8552445-6e0f-49f6-920a-46a2b6604221" containerName="extract-utilities" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.727837 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8552445-6e0f-49f6-920a-46a2b6604221" containerName="extract-utilities" Dec 05 17:27:22 crc kubenswrapper[4753]: E1205 17:27:22.727848 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerName="extract-utilities" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.727855 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" 
containerName="extract-utilities" Dec 05 17:27:22 crc kubenswrapper[4753]: E1205 17:27:22.727879 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerName="registry-server" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.727887 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerName="registry-server" Dec 05 17:27:22 crc kubenswrapper[4753]: E1205 17:27:22.727899 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8552445-6e0f-49f6-920a-46a2b6604221" containerName="extract-content" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.727906 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8552445-6e0f-49f6-920a-46a2b6604221" containerName="extract-content" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.728044 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8b0eef2-60df-47b7-bd8c-c0b10fe65f48" containerName="registry-server" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.728066 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8552445-6e0f-49f6-920a-46a2b6604221" containerName="registry-server" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.729247 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.733570 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.750899 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj"] Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.810677 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v645g\" (UniqueName: \"kubernetes.io/projected/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-kube-api-access-v645g\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.810751 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.810793 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.911794 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-bundle\") pod 
\"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.911990 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.912068 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v645g\" (UniqueName: \"kubernetes.io/projected/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-kube-api-access-v645g\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.912490 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.912494 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:22 crc kubenswrapper[4753]: I1205 17:27:22.939235 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v645g\" (UniqueName: \"kubernetes.io/projected/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-kube-api-access-v645g\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:23 crc kubenswrapper[4753]: I1205 17:27:23.048511 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:23 crc kubenswrapper[4753]: I1205 17:27:23.288410 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj"] Dec 05 17:27:23 crc kubenswrapper[4753]: I1205 17:27:23.831649 4753 generic.go:334] "Generic (PLEG): container finished" podID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerID="cc024755804524eedfdf1e2f26f01e81a9b902540fdf522b13bb8d22d4e77c1f" exitCode=0 Dec 05 17:27:23 crc kubenswrapper[4753]: I1205 17:27:23.831766 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" event={"ID":"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d","Type":"ContainerDied","Data":"cc024755804524eedfdf1e2f26f01e81a9b902540fdf522b13bb8d22d4e77c1f"} Dec 05 17:27:23 crc kubenswrapper[4753]: I1205 17:27:23.832100 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" event={"ID":"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d","Type":"ContainerStarted","Data":"2533ec93f0c6c2371dea8e5475b6ae8e39dd8383c0894467d511aef3f5895ab2"} Dec 05 17:27:23 crc kubenswrapper[4753]: I1205 17:27:23.834273 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:27:25 crc kubenswrapper[4753]: I1205 17:27:25.856880 4753 generic.go:334] "Generic (PLEG): container finished" podID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerID="184e371755f0c68d9664c95b462b4774b02ebc25902a661c98ff7f0d978f16c1" exitCode=0 Dec 05 17:27:25 crc kubenswrapper[4753]: I1205 17:27:25.857140 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" event={"ID":"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d","Type":"ContainerDied","Data":"184e371755f0c68d9664c95b462b4774b02ebc25902a661c98ff7f0d978f16c1"} Dec 05 17:27:26 crc kubenswrapper[4753]: I1205 17:27:26.867443 4753 generic.go:334] "Generic (PLEG): container finished" podID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerID="72a44c5b9c918ed2014bf9f62d3c0073cef1b6aa9f53c66f33de02fed4cd51a4" exitCode=0 Dec 05 17:27:26 crc kubenswrapper[4753]: I1205 17:27:26.867529 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" event={"ID":"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d","Type":"ContainerDied","Data":"72a44c5b9c918ed2014bf9f62d3c0073cef1b6aa9f53c66f33de02fed4cd51a4"} Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.157377 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.318603 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-bundle\") pod \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.319178 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-util\") pod \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.319329 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v645g\" (UniqueName: \"kubernetes.io/projected/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-kube-api-access-v645g\") pod \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\" (UID: \"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d\") " Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.320181 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-bundle" (OuterVolumeSpecName: "bundle") pod "cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" (UID: "cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.329767 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-kube-api-access-v645g" (OuterVolumeSpecName: "kube-api-access-v645g") pod "cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" (UID: "cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d"). InnerVolumeSpecName "kube-api-access-v645g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.335709 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-util" (OuterVolumeSpecName: "util") pod "cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" (UID: "cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.420958 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v645g\" (UniqueName: \"kubernetes.io/projected/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-kube-api-access-v645g\") on node \"crc\" DevicePath \"\"" Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.421002 4753 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.421012 4753 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d-util\") on node \"crc\" DevicePath \"\"" Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.888860 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" event={"ID":"cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d","Type":"ContainerDied","Data":"2533ec93f0c6c2371dea8e5475b6ae8e39dd8383c0894467d511aef3f5895ab2"} Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.888913 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj" Dec 05 17:27:28 crc kubenswrapper[4753]: I1205 17:27:28.888916 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2533ec93f0c6c2371dea8e5475b6ae8e39dd8383c0894467d511aef3f5895ab2" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.431103 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp"] Dec 05 17:27:32 crc kubenswrapper[4753]: E1205 17:27:32.433673 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerName="util" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.433699 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerName="util" Dec 05 17:27:32 crc kubenswrapper[4753]: E1205 17:27:32.433730 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerName="pull" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.433739 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerName="pull" Dec 05 17:27:32 crc kubenswrapper[4753]: E1205 17:27:32.433755 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerName="extract" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.433769 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerName="extract" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.433923 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d" containerName="extract" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.434691 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.440714 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.440865 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-prv75" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.442966 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.449998 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp"] Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.591281 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z42n\" (UniqueName: \"kubernetes.io/projected/37867715-d9ff-40d5-9c97-b99fd63be4b9-kube-api-access-7z42n\") pod \"nmstate-operator-5b5b58f5c8-5fxjp\" (UID: \"37867715-d9ff-40d5-9c97-b99fd63be4b9\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.693628 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z42n\" (UniqueName: \"kubernetes.io/projected/37867715-d9ff-40d5-9c97-b99fd63be4b9-kube-api-access-7z42n\") pod \"nmstate-operator-5b5b58f5c8-5fxjp\" (UID: \"37867715-d9ff-40d5-9c97-b99fd63be4b9\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.722981 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z42n\" (UniqueName: \"kubernetes.io/projected/37867715-d9ff-40d5-9c97-b99fd63be4b9-kube-api-access-7z42n\") pod \"nmstate-operator-5b5b58f5c8-5fxjp\" (UID: \"37867715-d9ff-40d5-9c97-b99fd63be4b9\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp" Dec 05 17:27:32 crc kubenswrapper[4753]: I1205 17:27:32.756808 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp" Dec 05 17:27:33 crc kubenswrapper[4753]: I1205 17:27:33.269959 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp"] Dec 05 17:27:33 crc kubenswrapper[4753]: I1205 17:27:33.936229 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp" event={"ID":"37867715-d9ff-40d5-9c97-b99fd63be4b9","Type":"ContainerStarted","Data":"bb8858f5f2ab31d8e749da65c14f22ff3ecac83b8ca3508945f139c8acbf478e"} Dec 05 17:27:35 crc kubenswrapper[4753]: I1205 17:27:35.955537 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp" event={"ID":"37867715-d9ff-40d5-9c97-b99fd63be4b9","Type":"ContainerStarted","Data":"e6e09f67d70d65dc4bd1cf781945515c95cfe195bc4e5a4c57b3445b9c4f95c4"} Dec 05 17:27:35 crc kubenswrapper[4753]: I1205 17:27:35.979441 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-5fxjp" podStartSLOduration=1.8631637250000002 podStartE2EDuration="3.979405043s" podCreationTimestamp="2025-12-05 17:27:32 +0000 UTC" firstStartedPulling="2025-12-05 17:27:33.279938549 +0000 UTC m=+1391.783045585" lastFinishedPulling="2025-12-05 17:27:35.396179907 +0000 UTC m=+1393.899286903" observedRunningTime="2025-12-05 17:27:35.976763598 +0000 UTC m=+1394.479870644" watchObservedRunningTime="2025-12-05 17:27:35.979405043 +0000 UTC m=+1394.482512059" Dec 05 17:27:36 crc kubenswrapper[4753]: I1205 17:27:36.934684 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l"] Dec 05 17:27:36 crc kubenswrapper[4753]: I1205 17:27:36.936058 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l" Dec 05 17:27:36 crc kubenswrapper[4753]: I1205 17:27:36.938506 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-hmbcl" Dec 05 17:27:36 crc kubenswrapper[4753]: I1205 17:27:36.938650 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b"] Dec 05 17:27:36 crc kubenswrapper[4753]: I1205 17:27:36.939761 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:36 crc kubenswrapper[4753]: I1205 17:27:36.961028 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 17:27:36 crc kubenswrapper[4753]: I1205 17:27:36.981895 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fjvnx\" (UniqueName: \"kubernetes.io/projected/3f053a7e-ea2d-4b0e-b0fb-928c0038c436-kube-api-access-fjvnx\") pod \"nmstate-metrics-7f946cbc9-ql66l\" (UID: \"3f053a7e-ea2d-4b0e-b0fb-928c0038c436\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l" Dec 05 17:27:36 crc kubenswrapper[4753]: I1205 17:27:36.981998 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/dcfb16ac-e2ad-4b15-a3c7-d2c35e950739-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-mkz6b\" (UID: \"dcfb16ac-e2ad-4b15-a3c7-d2c35e950739\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:36 crc kubenswrapper[4753]: I1205 17:27:36.982049 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chqmt\" (UniqueName: \"kubernetes.io/projected/dcfb16ac-e2ad-4b15-a3c7-d2c35e950739-kube-api-access-chqmt\") pod \"nmstate-webhook-5f6d4c5ccb-mkz6b\" (UID: \"dcfb16ac-e2ad-4b15-a3c7-d2c35e950739\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.031983 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l"] Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.047501 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-nq5l2"] Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.048953 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.067977 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b"] Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.085571 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjwtz\" (UniqueName: \"kubernetes.io/projected/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-kube-api-access-gjwtz\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.085651 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-dbus-socket\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.085698 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-nmstate-lock\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.085730 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fjvnx\" (UniqueName: \"kubernetes.io/projected/3f053a7e-ea2d-4b0e-b0fb-928c0038c436-kube-api-access-fjvnx\") pod \"nmstate-metrics-7f946cbc9-ql66l\" (UID: \"3f053a7e-ea2d-4b0e-b0fb-928c0038c436\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.085760 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/dcfb16ac-e2ad-4b15-a3c7-d2c35e950739-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-mkz6b\" (UID: \"dcfb16ac-e2ad-4b15-a3c7-d2c35e950739\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.085790 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chqmt\" (UniqueName: \"kubernetes.io/projected/dcfb16ac-e2ad-4b15-a3c7-d2c35e950739-kube-api-access-chqmt\") pod \"nmstate-webhook-5f6d4c5ccb-mkz6b\" (UID: \"dcfb16ac-e2ad-4b15-a3c7-d2c35e950739\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.085854 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-ovs-socket\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: E1205 17:27:37.086639 4753 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 05 17:27:37 crc kubenswrapper[4753]: E1205 17:27:37.086728 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dcfb16ac-e2ad-4b15-a3c7-d2c35e950739-tls-key-pair podName:dcfb16ac-e2ad-4b15-a3c7-d2c35e950739 nodeName:}" failed. 
No retries permitted until 2025-12-05 17:27:37.586699442 +0000 UTC m=+1396.089806448 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/dcfb16ac-e2ad-4b15-a3c7-d2c35e950739-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-mkz6b" (UID: "dcfb16ac-e2ad-4b15-a3c7-d2c35e950739") : secret "openshift-nmstate-webhook" not found Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.120649 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fjvnx\" (UniqueName: \"kubernetes.io/projected/3f053a7e-ea2d-4b0e-b0fb-928c0038c436-kube-api-access-fjvnx\") pod \"nmstate-metrics-7f946cbc9-ql66l\" (UID: \"3f053a7e-ea2d-4b0e-b0fb-928c0038c436\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.121087 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chqmt\" (UniqueName: \"kubernetes.io/projected/dcfb16ac-e2ad-4b15-a3c7-d2c35e950739-kube-api-access-chqmt\") pod \"nmstate-webhook-5f6d4c5ccb-mkz6b\" (UID: \"dcfb16ac-e2ad-4b15-a3c7-d2c35e950739\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.187468 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-ovs-socket\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.187543 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjwtz\" (UniqueName: \"kubernetes.io/projected/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-kube-api-access-gjwtz\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.187567 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-dbus-socket\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.187597 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-nmstate-lock\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.187663 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-ovs-socket\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.187764 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-nmstate-lock\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.188118 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-dbus-socket\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.221043 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw"] Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.223080 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.226573 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.226838 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-h2h6v" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.226943 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.227303 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjwtz\" (UniqueName: \"kubernetes.io/projected/1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3-kube-api-access-gjwtz\") pod \"nmstate-handler-nq5l2\" (UID: \"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3\") " pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.236362 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw"] Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.272331 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.288652 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/eccebd56-3231-40e1-b2f8-3b02547ff479-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-hdwrw\" (UID: \"eccebd56-3231-40e1-b2f8-3b02547ff479\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.289194 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wmfp\" (UniqueName: \"kubernetes.io/projected/eccebd56-3231-40e1-b2f8-3b02547ff479-kube-api-access-4wmfp\") pod \"nmstate-console-plugin-7fbb5f6569-hdwrw\" (UID: \"eccebd56-3231-40e1-b2f8-3b02547ff479\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.289240 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/eccebd56-3231-40e1-b2f8-3b02547ff479-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-hdwrw\" (UID: \"eccebd56-3231-40e1-b2f8-3b02547ff479\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.381849 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.390256 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/eccebd56-3231-40e1-b2f8-3b02547ff479-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-hdwrw\" (UID: \"eccebd56-3231-40e1-b2f8-3b02547ff479\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.390287 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wmfp\" (UniqueName: \"kubernetes.io/projected/eccebd56-3231-40e1-b2f8-3b02547ff479-kube-api-access-4wmfp\") pod \"nmstate-console-plugin-7fbb5f6569-hdwrw\" (UID: \"eccebd56-3231-40e1-b2f8-3b02547ff479\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.390333 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/eccebd56-3231-40e1-b2f8-3b02547ff479-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-hdwrw\" (UID: \"eccebd56-3231-40e1-b2f8-3b02547ff479\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.391514 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/eccebd56-3231-40e1-b2f8-3b02547ff479-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-hdwrw\" (UID: \"eccebd56-3231-40e1-b2f8-3b02547ff479\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.406277 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/eccebd56-3231-40e1-b2f8-3b02547ff479-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-hdwrw\" (UID: \"eccebd56-3231-40e1-b2f8-3b02547ff479\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: W1205 17:27:37.412660 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1d89bd55_dfdc_4dc5_94c7_36e3b21d95b3.slice/crio-64b9b112b09f02bada8e6190c79876352ac7149b007d2a2948021f5690361ab3 WatchSource:0}: Error finding container 64b9b112b09f02bada8e6190c79876352ac7149b007d2a2948021f5690361ab3: Status 404 returned error can't find the container with id 64b9b112b09f02bada8e6190c79876352ac7149b007d2a2948021f5690361ab3 Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.417661 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wmfp\" (UniqueName: \"kubernetes.io/projected/eccebd56-3231-40e1-b2f8-3b02547ff479-kube-api-access-4wmfp\") pod \"nmstate-console-plugin-7fbb5f6569-hdwrw\" (UID: \"eccebd56-3231-40e1-b2f8-3b02547ff479\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.436489 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-595dff7499-c8hvp"] Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.437817 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.463052 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-595dff7499-c8hvp"] Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.493120 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a65abc52-fef9-4087-8b91-094c74ef004d-console-oauth-config\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.493555 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-oauth-serving-cert\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.493588 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-service-ca\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.493616 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a65abc52-fef9-4087-8b91-094c74ef004d-console-serving-cert\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.493664 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-console-config\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.493742 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-trusted-ca-bundle\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.493761 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flt2c\" (UniqueName: \"kubernetes.io/projected/a65abc52-fef9-4087-8b91-094c74ef004d-kube-api-access-flt2c\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.550307 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l"] Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.564746 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.607234 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-oauth-serving-cert\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.607316 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-service-ca\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.607350 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a65abc52-fef9-4087-8b91-094c74ef004d-console-serving-cert\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.607410 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-console-config\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.607455 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-trusted-ca-bundle\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.607480 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flt2c\" (UniqueName: \"kubernetes.io/projected/a65abc52-fef9-4087-8b91-094c74ef004d-kube-api-access-flt2c\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.607514 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/dcfb16ac-e2ad-4b15-a3c7-d2c35e950739-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-mkz6b\" (UID: \"dcfb16ac-e2ad-4b15-a3c7-d2c35e950739\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.607545 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a65abc52-fef9-4087-8b91-094c74ef004d-console-oauth-config\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.610552 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-console-config\") pod \"console-595dff7499-c8hvp\" (UID: 
\"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.610575 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-service-ca\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.611238 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-oauth-serving-cert\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.612602 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a65abc52-fef9-4087-8b91-094c74ef004d-trusted-ca-bundle\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.618117 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a65abc52-fef9-4087-8b91-094c74ef004d-console-serving-cert\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.618981 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/dcfb16ac-e2ad-4b15-a3c7-d2c35e950739-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-mkz6b\" (UID: \"dcfb16ac-e2ad-4b15-a3c7-d2c35e950739\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.619822 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a65abc52-fef9-4087-8b91-094c74ef004d-console-oauth-config\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.620684 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.638966 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flt2c\" (UniqueName: \"kubernetes.io/projected/a65abc52-fef9-4087-8b91-094c74ef004d-kube-api-access-flt2c\") pod \"console-595dff7499-c8hvp\" (UID: \"a65abc52-fef9-4087-8b91-094c74ef004d\") " pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:37 crc kubenswrapper[4753]: I1205 17:27:37.775891 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:38 crc kubenswrapper[4753]: I1205 17:27:38.012270 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-nq5l2" event={"ID":"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3","Type":"ContainerStarted","Data":"64b9b112b09f02bada8e6190c79876352ac7149b007d2a2948021f5690361ab3"} Dec 05 17:27:38 crc kubenswrapper[4753]: I1205 17:27:38.016772 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l" event={"ID":"3f053a7e-ea2d-4b0e-b0fb-928c0038c436","Type":"ContainerStarted","Data":"29468bf06370e32fada235ccb1746595d9ea9823def9f7cc03b65b66f89162d7"} Dec 05 17:27:38 crc kubenswrapper[4753]: I1205 17:27:38.058479 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-595dff7499-c8hvp"] Dec 05 17:27:38 crc kubenswrapper[4753]: W1205 17:27:38.059276 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda65abc52_fef9_4087_8b91_094c74ef004d.slice/crio-5087562c5e0485868e074df18831b7ce3dc4d071273795a3850e5223e51f9890 WatchSource:0}: Error finding container 5087562c5e0485868e074df18831b7ce3dc4d071273795a3850e5223e51f9890: Status 404 returned error can't find the container with id 5087562c5e0485868e074df18831b7ce3dc4d071273795a3850e5223e51f9890 Dec 05 17:27:38 crc kubenswrapper[4753]: I1205 17:27:38.094756 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw"] Dec 05 17:27:38 crc kubenswrapper[4753]: W1205 17:27:38.096988 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeccebd56_3231_40e1_b2f8_3b02547ff479.slice/crio-5703e4bea54b0be6b8c85a679b33f7dcdf9e7af7c7769ead69f95a3d7a762a60 WatchSource:0}: Error finding container 5703e4bea54b0be6b8c85a679b33f7dcdf9e7af7c7769ead69f95a3d7a762a60: Status 404 returned error can't find the container with id 5703e4bea54b0be6b8c85a679b33f7dcdf9e7af7c7769ead69f95a3d7a762a60 Dec 05 17:27:38 crc kubenswrapper[4753]: I1205 17:27:38.182633 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b"] Dec 05 17:27:39 crc kubenswrapper[4753]: I1205 17:27:39.024796 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" event={"ID":"dcfb16ac-e2ad-4b15-a3c7-d2c35e950739","Type":"ContainerStarted","Data":"33bb080fb8a18ede4dd3412f52d4a563047ebec23cfc21d3282cc145a1acd344"} Dec 05 17:27:39 crc kubenswrapper[4753]: I1205 17:27:39.025690 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" event={"ID":"eccebd56-3231-40e1-b2f8-3b02547ff479","Type":"ContainerStarted","Data":"5703e4bea54b0be6b8c85a679b33f7dcdf9e7af7c7769ead69f95a3d7a762a60"} Dec 05 17:27:39 crc kubenswrapper[4753]: I1205 17:27:39.026825 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-595dff7499-c8hvp" event={"ID":"a65abc52-fef9-4087-8b91-094c74ef004d","Type":"ContainerStarted","Data":"9bac09c4b15b45351d88a724c224fba03470860ad3006279e0a09b811a5aed05"} Dec 05 17:27:39 crc kubenswrapper[4753]: I1205 17:27:39.026850 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-595dff7499-c8hvp" 
event={"ID":"a65abc52-fef9-4087-8b91-094c74ef004d","Type":"ContainerStarted","Data":"5087562c5e0485868e074df18831b7ce3dc4d071273795a3850e5223e51f9890"} Dec 05 17:27:39 crc kubenswrapper[4753]: I1205 17:27:39.054934 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-595dff7499-c8hvp" podStartSLOduration=2.054873295 podStartE2EDuration="2.054873295s" podCreationTimestamp="2025-12-05 17:27:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:27:39.04867883 +0000 UTC m=+1397.551785846" watchObservedRunningTime="2025-12-05 17:27:39.054873295 +0000 UTC m=+1397.557980321" Dec 05 17:27:42 crc kubenswrapper[4753]: I1205 17:27:42.063536 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" event={"ID":"dcfb16ac-e2ad-4b15-a3c7-d2c35e950739","Type":"ContainerStarted","Data":"d1374a6ffd387f25f3c6d461be59bcda24c812c8d65de80319316044e1908bce"} Dec 05 17:27:42 crc kubenswrapper[4753]: I1205 17:27:42.064582 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:42 crc kubenswrapper[4753]: I1205 17:27:42.066276 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" event={"ID":"eccebd56-3231-40e1-b2f8-3b02547ff479","Type":"ContainerStarted","Data":"119fb08258516e4680548717b2e504bf1941c5ade1ef6ec4fdd8e395d8d791d4"} Dec 05 17:27:42 crc kubenswrapper[4753]: I1205 17:27:42.068226 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-nq5l2" event={"ID":"1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3","Type":"ContainerStarted","Data":"64bc1f273cb66e49b24d0ec044ffbc4dff9ae27eca92dae8b7a8859d687c34ae"} Dec 05 17:27:42 crc kubenswrapper[4753]: I1205 17:27:42.069204 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:42 crc kubenswrapper[4753]: I1205 17:27:42.070535 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l" event={"ID":"3f053a7e-ea2d-4b0e-b0fb-928c0038c436","Type":"ContainerStarted","Data":"b4d80e832680ed2036b3f1dac0441c08f3f13e714ddac22fdcfdc6a318ffd3b3"} Dec 05 17:27:42 crc kubenswrapper[4753]: I1205 17:27:42.100086 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" podStartSLOduration=3.402935793 podStartE2EDuration="6.100056681s" podCreationTimestamp="2025-12-05 17:27:36 +0000 UTC" firstStartedPulling="2025-12-05 17:27:38.203258153 +0000 UTC m=+1396.706365159" lastFinishedPulling="2025-12-05 17:27:40.900379041 +0000 UTC m=+1399.403486047" observedRunningTime="2025-12-05 17:27:42.095268586 +0000 UTC m=+1400.598375602" watchObservedRunningTime="2025-12-05 17:27:42.100056681 +0000 UTC m=+1400.603163697" Dec 05 17:27:42 crc kubenswrapper[4753]: I1205 17:27:42.125366 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-nq5l2" podStartSLOduration=2.665139939 podStartE2EDuration="6.125332606s" podCreationTimestamp="2025-12-05 17:27:36 +0000 UTC" firstStartedPulling="2025-12-05 17:27:37.42835063 +0000 UTC m=+1395.931457636" lastFinishedPulling="2025-12-05 17:27:40.888543297 +0000 UTC m=+1399.391650303" observedRunningTime="2025-12-05 17:27:42.118842352 +0000 UTC 
m=+1400.621949388" watchObservedRunningTime="2025-12-05 17:27:42.125332606 +0000 UTC m=+1400.628439612" Dec 05 17:27:42 crc kubenswrapper[4753]: I1205 17:27:42.139036 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-hdwrw" podStartSLOduration=2.351413996 podStartE2EDuration="5.139002362s" podCreationTimestamp="2025-12-05 17:27:37 +0000 UTC" firstStartedPulling="2025-12-05 17:27:38.099550181 +0000 UTC m=+1396.602657187" lastFinishedPulling="2025-12-05 17:27:40.887138557 +0000 UTC m=+1399.390245553" observedRunningTime="2025-12-05 17:27:42.137652694 +0000 UTC m=+1400.640759710" watchObservedRunningTime="2025-12-05 17:27:42.139002362 +0000 UTC m=+1400.642109378" Dec 05 17:27:44 crc kubenswrapper[4753]: I1205 17:27:44.105930 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l" event={"ID":"3f053a7e-ea2d-4b0e-b0fb-928c0038c436","Type":"ContainerStarted","Data":"7582427c8d63fd3fe4f7bb6a482dd8cd844b2ffa1fb0174b58bf7d146980b7c8"} Dec 05 17:27:44 crc kubenswrapper[4753]: I1205 17:27:44.130852 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-ql66l" podStartSLOduration=2.146974043 podStartE2EDuration="8.130835235s" podCreationTimestamp="2025-12-05 17:27:36 +0000 UTC" firstStartedPulling="2025-12-05 17:27:37.562755749 +0000 UTC m=+1396.065862755" lastFinishedPulling="2025-12-05 17:27:43.546616931 +0000 UTC m=+1402.049723947" observedRunningTime="2025-12-05 17:27:44.128930901 +0000 UTC m=+1402.632037907" watchObservedRunningTime="2025-12-05 17:27:44.130835235 +0000 UTC m=+1402.633942241" Dec 05 17:27:47 crc kubenswrapper[4753]: I1205 17:27:47.407562 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-nq5l2" Dec 05 17:27:47 crc kubenswrapper[4753]: I1205 17:27:47.775969 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:47 crc kubenswrapper[4753]: I1205 17:27:47.776027 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:47 crc kubenswrapper[4753]: I1205 17:27:47.783640 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:48 crc kubenswrapper[4753]: I1205 17:27:48.151765 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-595dff7499-c8hvp" Dec 05 17:27:48 crc kubenswrapper[4753]: I1205 17:27:48.230288 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-9c622"] Dec 05 17:27:57 crc kubenswrapper[4753]: I1205 17:27:57.631930 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-mkz6b" Dec 05 17:27:58 crc kubenswrapper[4753]: I1205 17:27:58.979519 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:27:58 crc kubenswrapper[4753]: I1205 17:27:58.979619 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" 
podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.288541 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-9c622" podUID="48038042-7b0f-48d9-9f90-6c0b9dd179d6" containerName="console" containerID="cri-o://0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6" gracePeriod=15 Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.814260 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-9c622_48038042-7b0f-48d9-9f90-6c0b9dd179d6/console/0.log" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.814731 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.944231 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-oauth-serving-cert\") pod \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.944563 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psmlg\" (UniqueName: \"kubernetes.io/projected/48038042-7b0f-48d9-9f90-6c0b9dd179d6-kube-api-access-psmlg\") pod \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.944752 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-service-ca\") pod \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.944885 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-trusted-ca-bundle\") pod \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.945009 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-oauth-config\") pod \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.945119 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-serving-cert\") pod \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.945265 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-config\") pod \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\" (UID: \"48038042-7b0f-48d9-9f90-6c0b9dd179d6\") " Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 
17:28:13.944932 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "48038042-7b0f-48d9-9f90-6c0b9dd179d6" (UID: "48038042-7b0f-48d9-9f90-6c0b9dd179d6"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.945445 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-service-ca" (OuterVolumeSpecName: "service-ca") pod "48038042-7b0f-48d9-9f90-6c0b9dd179d6" (UID: "48038042-7b0f-48d9-9f90-6c0b9dd179d6"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.945571 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "48038042-7b0f-48d9-9f90-6c0b9dd179d6" (UID: "48038042-7b0f-48d9-9f90-6c0b9dd179d6"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.945942 4753 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.946034 4753 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.946079 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-config" (OuterVolumeSpecName: "console-config") pod "48038042-7b0f-48d9-9f90-6c0b9dd179d6" (UID: "48038042-7b0f-48d9-9f90-6c0b9dd179d6"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.946165 4753 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.952373 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "48038042-7b0f-48d9-9f90-6c0b9dd179d6" (UID: "48038042-7b0f-48d9-9f90-6c0b9dd179d6"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.952697 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48038042-7b0f-48d9-9f90-6c0b9dd179d6-kube-api-access-psmlg" (OuterVolumeSpecName: "kube-api-access-psmlg") pod "48038042-7b0f-48d9-9f90-6c0b9dd179d6" (UID: "48038042-7b0f-48d9-9f90-6c0b9dd179d6"). InnerVolumeSpecName "kube-api-access-psmlg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:28:13 crc kubenswrapper[4753]: I1205 17:28:13.963093 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "48038042-7b0f-48d9-9f90-6c0b9dd179d6" (UID: "48038042-7b0f-48d9-9f90-6c0b9dd179d6"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.047910 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psmlg\" (UniqueName: \"kubernetes.io/projected/48038042-7b0f-48d9-9f90-6c0b9dd179d6-kube-api-access-psmlg\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.048439 4753 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.048460 4753 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.048479 4753 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/48038042-7b0f-48d9-9f90-6c0b9dd179d6-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.366685 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-9c622_48038042-7b0f-48d9-9f90-6c0b9dd179d6/console/0.log" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.366759 4753 generic.go:334] "Generic (PLEG): container finished" podID="48038042-7b0f-48d9-9f90-6c0b9dd179d6" containerID="0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6" exitCode=2 Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.366808 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-9c622" event={"ID":"48038042-7b0f-48d9-9f90-6c0b9dd179d6","Type":"ContainerDied","Data":"0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6"} Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.366850 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-9c622" event={"ID":"48038042-7b0f-48d9-9f90-6c0b9dd179d6","Type":"ContainerDied","Data":"600a4a2dec294e252cfc6e7db3796b7533eddfe0cf4eda204d3f2bc5de668196"} Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.366874 4753 scope.go:117] "RemoveContainer" containerID="0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.366887 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-9c622" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.407708 4753 scope.go:117] "RemoveContainer" containerID="0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6" Dec 05 17:28:14 crc kubenswrapper[4753]: E1205 17:28:14.409923 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6\": container with ID starting with 0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6 not found: ID does not exist" containerID="0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.409992 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6"} err="failed to get container status \"0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6\": rpc error: code = NotFound desc = could not find container \"0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6\": container with ID starting with 0b4af9c9d235e28f01dfbf710b0a34ec240e146a82006c133a6058d0942accf6 not found: ID does not exist" Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.420216 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-9c622"] Dec 05 17:28:14 crc kubenswrapper[4753]: I1205 17:28:14.426784 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-9c622"] Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.370730 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4"] Dec 05 17:28:15 crc kubenswrapper[4753]: E1205 17:28:15.371126 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48038042-7b0f-48d9-9f90-6c0b9dd179d6" containerName="console" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.371163 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="48038042-7b0f-48d9-9f90-6c0b9dd179d6" containerName="console" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.371318 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="48038042-7b0f-48d9-9f90-6c0b9dd179d6" containerName="console" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.372667 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.379312 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.383992 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4"] Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.472814 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.472873 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nt25p\" (UniqueName: \"kubernetes.io/projected/83d761d5-40da-46c2-b378-cd1cde770ccd-kube-api-access-nt25p\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.472924 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.575859 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.574618 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.576035 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nt25p\" (UniqueName: \"kubernetes.io/projected/83d761d5-40da-46c2-b378-cd1cde770ccd-kube-api-access-nt25p\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.576205 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.576716 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.609096 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nt25p\" (UniqueName: \"kubernetes.io/projected/83d761d5-40da-46c2-b378-cd1cde770ccd-kube-api-access-nt25p\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.693087 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.736433 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48038042-7b0f-48d9-9f90-6c0b9dd179d6" path="/var/lib/kubelet/pods/48038042-7b0f-48d9-9f90-6c0b9dd179d6/volumes" Dec 05 17:28:15 crc kubenswrapper[4753]: I1205 17:28:15.980053 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4"] Dec 05 17:28:16 crc kubenswrapper[4753]: I1205 17:28:16.390849 4753 generic.go:334] "Generic (PLEG): container finished" podID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerID="0508371d5e309c5715549fed98f0e2059802a4b85c4df3b6418688f2f46e85da" exitCode=0 Dec 05 17:28:16 crc kubenswrapper[4753]: I1205 17:28:16.390925 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" event={"ID":"83d761d5-40da-46c2-b378-cd1cde770ccd","Type":"ContainerDied","Data":"0508371d5e309c5715549fed98f0e2059802a4b85c4df3b6418688f2f46e85da"} Dec 05 17:28:16 crc kubenswrapper[4753]: I1205 17:28:16.391409 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" event={"ID":"83d761d5-40da-46c2-b378-cd1cde770ccd","Type":"ContainerStarted","Data":"2c07768356f3e6a88140600586eb483799a6ddf4e48a1a492184c2259196f412"} Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.737703 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fgx5d"] Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.739638 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.762855 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fgx5d"] Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.819360 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vnlg\" (UniqueName: \"kubernetes.io/projected/4537eed6-18dc-49b6-8711-be48d8502029-kube-api-access-7vnlg\") pod \"redhat-operators-fgx5d\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.819714 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-utilities\") pod \"redhat-operators-fgx5d\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.819872 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-catalog-content\") pod \"redhat-operators-fgx5d\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.922076 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-utilities\") pod \"redhat-operators-fgx5d\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.922667 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-catalog-content\") pod \"redhat-operators-fgx5d\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.922720 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vnlg\" (UniqueName: \"kubernetes.io/projected/4537eed6-18dc-49b6-8711-be48d8502029-kube-api-access-7vnlg\") pod \"redhat-operators-fgx5d\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.923091 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-utilities\") pod \"redhat-operators-fgx5d\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.923231 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-catalog-content\") pod \"redhat-operators-fgx5d\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:17 crc kubenswrapper[4753]: I1205 17:28:17.953391 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-7vnlg\" (UniqueName: \"kubernetes.io/projected/4537eed6-18dc-49b6-8711-be48d8502029-kube-api-access-7vnlg\") pod \"redhat-operators-fgx5d\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:18 crc kubenswrapper[4753]: I1205 17:28:18.120999 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:18 crc kubenswrapper[4753]: I1205 17:28:18.354916 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fgx5d"] Dec 05 17:28:18 crc kubenswrapper[4753]: W1205 17:28:18.360820 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4537eed6_18dc_49b6_8711_be48d8502029.slice/crio-efd975baa7f8128fc27bb4d559cf74f106db50129324b12054df267859f781cd WatchSource:0}: Error finding container efd975baa7f8128fc27bb4d559cf74f106db50129324b12054df267859f781cd: Status 404 returned error can't find the container with id efd975baa7f8128fc27bb4d559cf74f106db50129324b12054df267859f781cd Dec 05 17:28:18 crc kubenswrapper[4753]: I1205 17:28:18.411116 4753 generic.go:334] "Generic (PLEG): container finished" podID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerID="ec634f343cebe97d0cc3ff361deebb6179d1b5ae66906ec341c1f5ac999fde6e" exitCode=0 Dec 05 17:28:18 crc kubenswrapper[4753]: I1205 17:28:18.411214 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" event={"ID":"83d761d5-40da-46c2-b378-cd1cde770ccd","Type":"ContainerDied","Data":"ec634f343cebe97d0cc3ff361deebb6179d1b5ae66906ec341c1f5ac999fde6e"} Dec 05 17:28:18 crc kubenswrapper[4753]: I1205 17:28:18.413940 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fgx5d" event={"ID":"4537eed6-18dc-49b6-8711-be48d8502029","Type":"ContainerStarted","Data":"efd975baa7f8128fc27bb4d559cf74f106db50129324b12054df267859f781cd"} Dec 05 17:28:19 crc kubenswrapper[4753]: I1205 17:28:19.434729 4753 generic.go:334] "Generic (PLEG): container finished" podID="4537eed6-18dc-49b6-8711-be48d8502029" containerID="c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b" exitCode=0 Dec 05 17:28:19 crc kubenswrapper[4753]: I1205 17:28:19.434835 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fgx5d" event={"ID":"4537eed6-18dc-49b6-8711-be48d8502029","Type":"ContainerDied","Data":"c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b"} Dec 05 17:28:19 crc kubenswrapper[4753]: I1205 17:28:19.439951 4753 generic.go:334] "Generic (PLEG): container finished" podID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerID="a5784bc8715e0377303d58ca6015ed03c0941b9276190291d08082ae7250231a" exitCode=0 Dec 05 17:28:19 crc kubenswrapper[4753]: I1205 17:28:19.440447 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" event={"ID":"83d761d5-40da-46c2-b378-cd1cde770ccd","Type":"ContainerDied","Data":"a5784bc8715e0377303d58ca6015ed03c0941b9276190291d08082ae7250231a"} Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.452455 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fgx5d" 
event={"ID":"4537eed6-18dc-49b6-8711-be48d8502029","Type":"ContainerStarted","Data":"8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087"} Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.779858 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.873890 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nt25p\" (UniqueName: \"kubernetes.io/projected/83d761d5-40da-46c2-b378-cd1cde770ccd-kube-api-access-nt25p\") pod \"83d761d5-40da-46c2-b378-cd1cde770ccd\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.873987 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-bundle\") pod \"83d761d5-40da-46c2-b378-cd1cde770ccd\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.874035 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-util\") pod \"83d761d5-40da-46c2-b378-cd1cde770ccd\" (UID: \"83d761d5-40da-46c2-b378-cd1cde770ccd\") " Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.877094 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-bundle" (OuterVolumeSpecName: "bundle") pod "83d761d5-40da-46c2-b378-cd1cde770ccd" (UID: "83d761d5-40da-46c2-b378-cd1cde770ccd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.881917 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83d761d5-40da-46c2-b378-cd1cde770ccd-kube-api-access-nt25p" (OuterVolumeSpecName: "kube-api-access-nt25p") pod "83d761d5-40da-46c2-b378-cd1cde770ccd" (UID: "83d761d5-40da-46c2-b378-cd1cde770ccd"). InnerVolumeSpecName "kube-api-access-nt25p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.889673 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-util" (OuterVolumeSpecName: "util") pod "83d761d5-40da-46c2-b378-cd1cde770ccd" (UID: "83d761d5-40da-46c2-b378-cd1cde770ccd"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.976698 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nt25p\" (UniqueName: \"kubernetes.io/projected/83d761d5-40da-46c2-b378-cd1cde770ccd-kube-api-access-nt25p\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.976770 4753 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:20 crc kubenswrapper[4753]: I1205 17:28:20.976791 4753 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/83d761d5-40da-46c2-b378-cd1cde770ccd-util\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:21 crc kubenswrapper[4753]: I1205 17:28:21.461953 4753 generic.go:334] "Generic (PLEG): container finished" podID="4537eed6-18dc-49b6-8711-be48d8502029" containerID="8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087" exitCode=0 Dec 05 17:28:21 crc kubenswrapper[4753]: I1205 17:28:21.462071 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fgx5d" event={"ID":"4537eed6-18dc-49b6-8711-be48d8502029","Type":"ContainerDied","Data":"8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087"} Dec 05 17:28:21 crc kubenswrapper[4753]: I1205 17:28:21.467012 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" event={"ID":"83d761d5-40da-46c2-b378-cd1cde770ccd","Type":"ContainerDied","Data":"2c07768356f3e6a88140600586eb483799a6ddf4e48a1a492184c2259196f412"} Dec 05 17:28:21 crc kubenswrapper[4753]: I1205 17:28:21.467067 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c07768356f3e6a88140600586eb483799a6ddf4e48a1a492184c2259196f412" Dec 05 17:28:21 crc kubenswrapper[4753]: I1205 17:28:21.467080 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4" Dec 05 17:28:22 crc kubenswrapper[4753]: I1205 17:28:22.475279 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fgx5d" event={"ID":"4537eed6-18dc-49b6-8711-be48d8502029","Type":"ContainerStarted","Data":"9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90"} Dec 05 17:28:22 crc kubenswrapper[4753]: I1205 17:28:22.505143 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fgx5d" podStartSLOduration=3.059617132 podStartE2EDuration="5.505112011s" podCreationTimestamp="2025-12-05 17:28:17 +0000 UTC" firstStartedPulling="2025-12-05 17:28:19.437235815 +0000 UTC m=+1437.940342821" lastFinishedPulling="2025-12-05 17:28:21.882730694 +0000 UTC m=+1440.385837700" observedRunningTime="2025-12-05 17:28:22.50437584 +0000 UTC m=+1441.007482856" watchObservedRunningTime="2025-12-05 17:28:22.505112011 +0000 UTC m=+1441.008219027" Dec 05 17:28:28 crc kubenswrapper[4753]: I1205 17:28:28.121374 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:28 crc kubenswrapper[4753]: I1205 17:28:28.122371 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:28 crc kubenswrapper[4753]: I1205 17:28:28.979242 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:28:28 crc kubenswrapper[4753]: I1205 17:28:28.979566 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:28:29 crc kubenswrapper[4753]: I1205 17:28:29.176604 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fgx5d" podUID="4537eed6-18dc-49b6-8711-be48d8502029" containerName="registry-server" probeResult="failure" output=< Dec 05 17:28:29 crc kubenswrapper[4753]: timeout: failed to connect service ":50051" within 1s Dec 05 17:28:29 crc kubenswrapper[4753]: > Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.708618 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-79f6547568-z26rz"] Dec 05 17:28:30 crc kubenswrapper[4753]: E1205 17:28:30.709538 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerName="extract" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.709557 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerName="extract" Dec 05 17:28:30 crc kubenswrapper[4753]: E1205 17:28:30.709591 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerName="pull" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.709599 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerName="pull" Dec 
05 17:28:30 crc kubenswrapper[4753]: E1205 17:28:30.709611 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerName="util" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.709619 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerName="util" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.709764 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="83d761d5-40da-46c2-b378-cd1cde770ccd" containerName="extract" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.710440 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.713721 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.715617 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-qvgq4" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.716216 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.716382 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.719299 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.730906 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-79f6547568-z26rz"] Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.853550 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44de2355-c97c-4421-87b6-1e7301bf430b-apiservice-cert\") pod \"metallb-operator-controller-manager-79f6547568-z26rz\" (UID: \"44de2355-c97c-4421-87b6-1e7301bf430b\") " pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.853620 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44de2355-c97c-4421-87b6-1e7301bf430b-webhook-cert\") pod \"metallb-operator-controller-manager-79f6547568-z26rz\" (UID: \"44de2355-c97c-4421-87b6-1e7301bf430b\") " pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.853792 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhxf6\" (UniqueName: \"kubernetes.io/projected/44de2355-c97c-4421-87b6-1e7301bf430b-kube-api-access-mhxf6\") pod \"metallb-operator-controller-manager-79f6547568-z26rz\" (UID: \"44de2355-c97c-4421-87b6-1e7301bf430b\") " pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.941561 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch"] Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.942550 4753 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.947125 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-dssv6" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.947519 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.947682 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.956582 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44de2355-c97c-4421-87b6-1e7301bf430b-apiservice-cert\") pod \"metallb-operator-controller-manager-79f6547568-z26rz\" (UID: \"44de2355-c97c-4421-87b6-1e7301bf430b\") " pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.956669 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44de2355-c97c-4421-87b6-1e7301bf430b-webhook-cert\") pod \"metallb-operator-controller-manager-79f6547568-z26rz\" (UID: \"44de2355-c97c-4421-87b6-1e7301bf430b\") " pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.958074 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhxf6\" (UniqueName: \"kubernetes.io/projected/44de2355-c97c-4421-87b6-1e7301bf430b-kube-api-access-mhxf6\") pod \"metallb-operator-controller-manager-79f6547568-z26rz\" (UID: \"44de2355-c97c-4421-87b6-1e7301bf430b\") " pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.962975 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch"] Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.964433 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/44de2355-c97c-4421-87b6-1e7301bf430b-webhook-cert\") pod \"metallb-operator-controller-manager-79f6547568-z26rz\" (UID: \"44de2355-c97c-4421-87b6-1e7301bf430b\") " pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.966810 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/44de2355-c97c-4421-87b6-1e7301bf430b-apiservice-cert\") pod \"metallb-operator-controller-manager-79f6547568-z26rz\" (UID: \"44de2355-c97c-4421-87b6-1e7301bf430b\") " pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:30 crc kubenswrapper[4753]: I1205 17:28:30.990046 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhxf6\" (UniqueName: \"kubernetes.io/projected/44de2355-c97c-4421-87b6-1e7301bf430b-kube-api-access-mhxf6\") pod \"metallb-operator-controller-manager-79f6547568-z26rz\" (UID: \"44de2355-c97c-4421-87b6-1e7301bf430b\") " pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.032369 4753 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.059230 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z855p\" (UniqueName: \"kubernetes.io/projected/15c925fa-97bb-4d10-b85d-b451adac7306-kube-api-access-z855p\") pod \"metallb-operator-webhook-server-7c4d66c645-ptcch\" (UID: \"15c925fa-97bb-4d10-b85d-b451adac7306\") " pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.059343 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/15c925fa-97bb-4d10-b85d-b451adac7306-apiservice-cert\") pod \"metallb-operator-webhook-server-7c4d66c645-ptcch\" (UID: \"15c925fa-97bb-4d10-b85d-b451adac7306\") " pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.059412 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/15c925fa-97bb-4d10-b85d-b451adac7306-webhook-cert\") pod \"metallb-operator-webhook-server-7c4d66c645-ptcch\" (UID: \"15c925fa-97bb-4d10-b85d-b451adac7306\") " pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.160786 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/15c925fa-97bb-4d10-b85d-b451adac7306-webhook-cert\") pod \"metallb-operator-webhook-server-7c4d66c645-ptcch\" (UID: \"15c925fa-97bb-4d10-b85d-b451adac7306\") " pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.160885 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z855p\" (UniqueName: \"kubernetes.io/projected/15c925fa-97bb-4d10-b85d-b451adac7306-kube-api-access-z855p\") pod \"metallb-operator-webhook-server-7c4d66c645-ptcch\" (UID: \"15c925fa-97bb-4d10-b85d-b451adac7306\") " pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.160956 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/15c925fa-97bb-4d10-b85d-b451adac7306-apiservice-cert\") pod \"metallb-operator-webhook-server-7c4d66c645-ptcch\" (UID: \"15c925fa-97bb-4d10-b85d-b451adac7306\") " pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.167262 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/15c925fa-97bb-4d10-b85d-b451adac7306-webhook-cert\") pod \"metallb-operator-webhook-server-7c4d66c645-ptcch\" (UID: \"15c925fa-97bb-4d10-b85d-b451adac7306\") " pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.167738 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/15c925fa-97bb-4d10-b85d-b451adac7306-apiservice-cert\") pod \"metallb-operator-webhook-server-7c4d66c645-ptcch\" (UID: 
\"15c925fa-97bb-4d10-b85d-b451adac7306\") " pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.183217 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z855p\" (UniqueName: \"kubernetes.io/projected/15c925fa-97bb-4d10-b85d-b451adac7306-kube-api-access-z855p\") pod \"metallb-operator-webhook-server-7c4d66c645-ptcch\" (UID: \"15c925fa-97bb-4d10-b85d-b451adac7306\") " pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.260779 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.501057 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-79f6547568-z26rz"] Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.549643 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" event={"ID":"44de2355-c97c-4421-87b6-1e7301bf430b","Type":"ContainerStarted","Data":"9650b3bfb7a92639044138b21ded3fe19a6e63a14f50e3f589ac00645738fd10"} Dec 05 17:28:31 crc kubenswrapper[4753]: I1205 17:28:31.701621 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch"] Dec 05 17:28:31 crc kubenswrapper[4753]: W1205 17:28:31.705774 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15c925fa_97bb_4d10_b85d_b451adac7306.slice/crio-14ddaa94e0479f76772508c28befc1a206cc510f29d7cf5a862d00db9e68d12d WatchSource:0}: Error finding container 14ddaa94e0479f76772508c28befc1a206cc510f29d7cf5a862d00db9e68d12d: Status 404 returned error can't find the container with id 14ddaa94e0479f76772508c28befc1a206cc510f29d7cf5a862d00db9e68d12d Dec 05 17:28:32 crc kubenswrapper[4753]: I1205 17:28:32.565599 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" event={"ID":"15c925fa-97bb-4d10-b85d-b451adac7306","Type":"ContainerStarted","Data":"14ddaa94e0479f76772508c28befc1a206cc510f29d7cf5a862d00db9e68d12d"} Dec 05 17:28:38 crc kubenswrapper[4753]: I1205 17:28:38.225666 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:38 crc kubenswrapper[4753]: I1205 17:28:38.302413 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:38 crc kubenswrapper[4753]: I1205 17:28:38.629921 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" event={"ID":"15c925fa-97bb-4d10-b85d-b451adac7306","Type":"ContainerStarted","Data":"39ac853943e3029388cf4b7db4cdc1718da4e9e445b85436fbb8e5d26bd90b49"} Dec 05 17:28:38 crc kubenswrapper[4753]: I1205 17:28:38.630062 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:38 crc kubenswrapper[4753]: I1205 17:28:38.632187 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" 
event={"ID":"44de2355-c97c-4421-87b6-1e7301bf430b","Type":"ContainerStarted","Data":"23496373510affcbe59a489f8ba061d42e28063d05d770e3cc451f00e7dcdebd"} Dec 05 17:28:38 crc kubenswrapper[4753]: I1205 17:28:38.669318 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" podStartSLOduration=2.825770517 podStartE2EDuration="8.669283972s" podCreationTimestamp="2025-12-05 17:28:30 +0000 UTC" firstStartedPulling="2025-12-05 17:28:31.709087437 +0000 UTC m=+1450.212194443" lastFinishedPulling="2025-12-05 17:28:37.552600892 +0000 UTC m=+1456.055707898" observedRunningTime="2025-12-05 17:28:38.660205495 +0000 UTC m=+1457.163312511" watchObservedRunningTime="2025-12-05 17:28:38.669283972 +0000 UTC m=+1457.172391018" Dec 05 17:28:39 crc kubenswrapper[4753]: I1205 17:28:39.106862 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" podStartSLOduration=3.112918499 podStartE2EDuration="9.106835069s" podCreationTimestamp="2025-12-05 17:28:30 +0000 UTC" firstStartedPulling="2025-12-05 17:28:31.541093805 +0000 UTC m=+1450.044200811" lastFinishedPulling="2025-12-05 17:28:37.535010375 +0000 UTC m=+1456.038117381" observedRunningTime="2025-12-05 17:28:38.693940858 +0000 UTC m=+1457.197047904" watchObservedRunningTime="2025-12-05 17:28:39.106835069 +0000 UTC m=+1457.609942075" Dec 05 17:28:39 crc kubenswrapper[4753]: I1205 17:28:39.110912 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fgx5d"] Dec 05 17:28:39 crc kubenswrapper[4753]: I1205 17:28:39.638592 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:28:39 crc kubenswrapper[4753]: I1205 17:28:39.639009 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fgx5d" podUID="4537eed6-18dc-49b6-8711-be48d8502029" containerName="registry-server" containerID="cri-o://9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90" gracePeriod=2 Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.563483 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.629970 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-utilities\") pod \"4537eed6-18dc-49b6-8711-be48d8502029\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.630112 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vnlg\" (UniqueName: \"kubernetes.io/projected/4537eed6-18dc-49b6-8711-be48d8502029-kube-api-access-7vnlg\") pod \"4537eed6-18dc-49b6-8711-be48d8502029\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.630284 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-catalog-content\") pod \"4537eed6-18dc-49b6-8711-be48d8502029\" (UID: \"4537eed6-18dc-49b6-8711-be48d8502029\") " Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.630967 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-utilities" (OuterVolumeSpecName: "utilities") pod "4537eed6-18dc-49b6-8711-be48d8502029" (UID: "4537eed6-18dc-49b6-8711-be48d8502029"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.638951 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4537eed6-18dc-49b6-8711-be48d8502029-kube-api-access-7vnlg" (OuterVolumeSpecName: "kube-api-access-7vnlg") pod "4537eed6-18dc-49b6-8711-be48d8502029" (UID: "4537eed6-18dc-49b6-8711-be48d8502029"). InnerVolumeSpecName "kube-api-access-7vnlg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.649458 4753 generic.go:334] "Generic (PLEG): container finished" podID="4537eed6-18dc-49b6-8711-be48d8502029" containerID="9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90" exitCode=0 Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.649546 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fgx5d" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.649538 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fgx5d" event={"ID":"4537eed6-18dc-49b6-8711-be48d8502029","Type":"ContainerDied","Data":"9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90"} Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.649629 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fgx5d" event={"ID":"4537eed6-18dc-49b6-8711-be48d8502029","Type":"ContainerDied","Data":"efd975baa7f8128fc27bb4d559cf74f106db50129324b12054df267859f781cd"} Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.649652 4753 scope.go:117] "RemoveContainer" containerID="9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.674435 4753 scope.go:117] "RemoveContainer" containerID="8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.696963 4753 scope.go:117] "RemoveContainer" containerID="c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.720264 4753 scope.go:117] "RemoveContainer" containerID="9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90" Dec 05 17:28:40 crc kubenswrapper[4753]: E1205 17:28:40.732428 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90\": container with ID starting with 9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90 not found: ID does not exist" containerID="9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.732505 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90"} err="failed to get container status \"9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90\": rpc error: code = NotFound desc = could not find container \"9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90\": container with ID starting with 9545a2f2578b02c6b16a60b125b8530ed6487ad5ec6039e9a512789f48122f90 not found: ID does not exist" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.732547 4753 scope.go:117] "RemoveContainer" containerID="8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087" Dec 05 17:28:40 crc kubenswrapper[4753]: E1205 17:28:40.733270 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087\": container with ID starting with 8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087 not found: ID does not exist" containerID="8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.733303 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087"} err="failed to get container status \"8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087\": rpc error: code = NotFound desc = could not find container 
\"8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087\": container with ID starting with 8b773cd232ccad6f7ef5c53a6c67faa3f493c0feaafd1b408ed66b7c2b7a1087 not found: ID does not exist" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.733320 4753 scope.go:117] "RemoveContainer" containerID="c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.733674 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:40 crc kubenswrapper[4753]: E1205 17:28:40.733686 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b\": container with ID starting with c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b not found: ID does not exist" containerID="c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.733767 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b"} err="failed to get container status \"c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b\": rpc error: code = NotFound desc = could not find container \"c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b\": container with ID starting with c18da6cc756c9848c7a18f38c864cb5f075a7c7d7d422cc0cdc6a598c084b72b not found: ID does not exist" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.733832 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vnlg\" (UniqueName: \"kubernetes.io/projected/4537eed6-18dc-49b6-8711-be48d8502029-kube-api-access-7vnlg\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.771098 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4537eed6-18dc-49b6-8711-be48d8502029" (UID: "4537eed6-18dc-49b6-8711-be48d8502029"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.835875 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4537eed6-18dc-49b6-8711-be48d8502029-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.993880 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fgx5d"] Dec 05 17:28:40 crc kubenswrapper[4753]: I1205 17:28:40.998895 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fgx5d"] Dec 05 17:28:41 crc kubenswrapper[4753]: I1205 17:28:41.730830 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4537eed6-18dc-49b6-8711-be48d8502029" path="/var/lib/kubelet/pods/4537eed6-18dc-49b6-8711-be48d8502029/volumes" Dec 05 17:28:51 crc kubenswrapper[4753]: I1205 17:28:51.268549 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7c4d66c645-ptcch" Dec 05 17:28:58 crc kubenswrapper[4753]: I1205 17:28:58.979717 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:28:58 crc kubenswrapper[4753]: I1205 17:28:58.980426 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:28:58 crc kubenswrapper[4753]: I1205 17:28:58.980498 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:28:58 crc kubenswrapper[4753]: I1205 17:28:58.981330 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e5f48dac911921ce787e6e9fea9709c824d57dffe736123659ddb77fb75ed48a"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:28:58 crc kubenswrapper[4753]: I1205 17:28:58.981400 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://e5f48dac911921ce787e6e9fea9709c824d57dffe736123659ddb77fb75ed48a" gracePeriod=600 Dec 05 17:28:59 crc kubenswrapper[4753]: I1205 17:28:59.814541 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="e5f48dac911921ce787e6e9fea9709c824d57dffe736123659ddb77fb75ed48a" exitCode=0 Dec 05 17:28:59 crc kubenswrapper[4753]: I1205 17:28:59.814649 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"e5f48dac911921ce787e6e9fea9709c824d57dffe736123659ddb77fb75ed48a"} Dec 05 17:28:59 crc kubenswrapper[4753]: I1205 
17:28:59.814959 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd"} Dec 05 17:28:59 crc kubenswrapper[4753]: I1205 17:28:59.814990 4753 scope.go:117] "RemoveContainer" containerID="2170ea733cb4ab2e379e8299ee324bec350d8db90db1f1068d3bbeda7a75cd94" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.037090 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-79f6547568-z26rz" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.873679 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-2gprd"] Dec 05 17:29:11 crc kubenswrapper[4753]: E1205 17:29:11.874109 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4537eed6-18dc-49b6-8711-be48d8502029" containerName="extract-utilities" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.874132 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4537eed6-18dc-49b6-8711-be48d8502029" containerName="extract-utilities" Dec 05 17:29:11 crc kubenswrapper[4753]: E1205 17:29:11.874185 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4537eed6-18dc-49b6-8711-be48d8502029" containerName="extract-content" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.874195 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4537eed6-18dc-49b6-8711-be48d8502029" containerName="extract-content" Dec 05 17:29:11 crc kubenswrapper[4753]: E1205 17:29:11.874218 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4537eed6-18dc-49b6-8711-be48d8502029" containerName="registry-server" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.874224 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4537eed6-18dc-49b6-8711-be48d8502029" containerName="registry-server" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.874384 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="4537eed6-18dc-49b6-8711-be48d8502029" containerName="registry-server" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.876856 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.879520 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht"] Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.879650 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-h662v" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.879737 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.879841 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.880489 4753 util.go:30] "No sandbox for pod can be found. 
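The machine-config-daemon entries above walk the full restart path: an HTTP GET to http://127.0.0.1:8798/health is refused, the liveness probe is marked a failure, and the container is killed with gracePeriod=600 and started again. A minimal HTTP liveness check in the same spirit; kubelet's real prober adds configurable timeouts, headers, and failure thresholds, while the [200, 400) success range below is the documented probe rule:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP liveness check against url.
func probe(url string) (healthy bool, detail string) {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		// e.g. "dial tcp 127.0.0.1:8798: connect: connection refused"
		return false, err.Error()
	}
	defer resp.Body.Close()
	// Codes in [200, 400) count as success, per the kubelet probe rule.
	return resp.StatusCode >= 200 && resp.StatusCode < 400, resp.Status
}

func main() {
	fmt.Println(probe("http://127.0.0.1:8798/health"))
}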
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.881714 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.901199 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht"] Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.974084 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-frr-sockets\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.974531 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq5vw\" (UniqueName: \"kubernetes.io/projected/bade1527-dbee-4843-b00d-0a41e7c516d9-kube-api-access-rq5vw\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.974586 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/42fa5f2a-00be-462f-b4f2-35e8b89e8a5e-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-hn9ht\" (UID: \"42fa5f2a-00be-462f-b4f2-35e8b89e8a5e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.974618 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-metrics\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.974635 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bade1527-dbee-4843-b00d-0a41e7c516d9-metrics-certs\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.974664 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/bade1527-dbee-4843-b00d-0a41e7c516d9-frr-startup\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.974688 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-frr-conf\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.974708 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-reloader\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:11 crc kubenswrapper[4753]: I1205 17:29:11.974854 
4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9jdk\" (UniqueName: \"kubernetes.io/projected/42fa5f2a-00be-462f-b4f2-35e8b89e8a5e-kube-api-access-m9jdk\") pod \"frr-k8s-webhook-server-7fcb986d4-hn9ht\" (UID: \"42fa5f2a-00be-462f-b4f2-35e8b89e8a5e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.008558 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-jls6g"] Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.010254 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.013242 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.013796 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.013805 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-xjvsf" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.015588 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.029310 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-q7lzx"] Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.030526 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.032715 4753 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.045520 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-q7lzx"] Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.076505 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dbqqx\" (UniqueName: \"kubernetes.io/projected/08625193-514e-494a-b64a-75f345cf14bc-kube-api-access-dbqqx\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.076574 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-metrics\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.076596 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bade1527-dbee-4843-b00d-0a41e7c516d9-metrics-certs\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.076631 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/bade1527-dbee-4843-b00d-0a41e7c516d9-frr-startup\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 
17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.076743 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/08625193-514e-494a-b64a-75f345cf14bc-metallb-excludel2\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.076837 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-frr-conf\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.076874 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-reloader\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.076917 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9jdk\" (UniqueName: \"kubernetes.io/projected/42fa5f2a-00be-462f-b4f2-35e8b89e8a5e-kube-api-access-m9jdk\") pod \"frr-k8s-webhook-server-7fcb986d4-hn9ht\" (UID: \"42fa5f2a-00be-462f-b4f2-35e8b89e8a5e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.077034 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-metrics-certs\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.077068 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-frr-sockets\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.077133 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq5vw\" (UniqueName: \"kubernetes.io/projected/bade1527-dbee-4843-b00d-0a41e7c516d9-kube-api-access-rq5vw\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.077514 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/42fa5f2a-00be-462f-b4f2-35e8b89e8a5e-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-hn9ht\" (UID: \"42fa5f2a-00be-462f-b4f2-35e8b89e8a5e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.077559 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-memberlist\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.077644 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: 
\"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-metrics\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.077777 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-frr-conf\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.078018 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-reloader\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.078090 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/bade1527-dbee-4843-b00d-0a41e7c516d9-frr-sockets\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.078849 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/bade1527-dbee-4843-b00d-0a41e7c516d9-frr-startup\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.085101 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/42fa5f2a-00be-462f-b4f2-35e8b89e8a5e-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-hn9ht\" (UID: \"42fa5f2a-00be-462f-b4f2-35e8b89e8a5e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.088607 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bade1527-dbee-4843-b00d-0a41e7c516d9-metrics-certs\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.105998 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq5vw\" (UniqueName: \"kubernetes.io/projected/bade1527-dbee-4843-b00d-0a41e7c516d9-kube-api-access-rq5vw\") pod \"frr-k8s-2gprd\" (UID: \"bade1527-dbee-4843-b00d-0a41e7c516d9\") " pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.111063 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9jdk\" (UniqueName: \"kubernetes.io/projected/42fa5f2a-00be-462f-b4f2-35e8b89e8a5e-kube-api-access-m9jdk\") pod \"frr-k8s-webhook-server-7fcb986d4-hn9ht\" (UID: \"42fa5f2a-00be-462f-b4f2-35e8b89e8a5e\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.180506 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/08625193-514e-494a-b64a-75f345cf14bc-metallb-excludel2\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.179507 4753 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/08625193-514e-494a-b64a-75f345cf14bc-metallb-excludel2\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.180675 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/23e6e527-684e-4e11-8470-b4149bb4c6cc-metrics-certs\") pod \"controller-f8648f98b-q7lzx\" (UID: \"23e6e527-684e-4e11-8470-b4149bb4c6cc\") " pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.180702 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-metrics-certs\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.180796 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzj6w\" (UniqueName: \"kubernetes.io/projected/23e6e527-684e-4e11-8470-b4149bb4c6cc-kube-api-access-kzj6w\") pod \"controller-f8648f98b-q7lzx\" (UID: \"23e6e527-684e-4e11-8470-b4149bb4c6cc\") " pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.180917 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/23e6e527-684e-4e11-8470-b4149bb4c6cc-cert\") pod \"controller-f8648f98b-q7lzx\" (UID: \"23e6e527-684e-4e11-8470-b4149bb4c6cc\") " pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.180945 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-memberlist\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.180975 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbqqx\" (UniqueName: \"kubernetes.io/projected/08625193-514e-494a-b64a-75f345cf14bc-kube-api-access-dbqqx\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: E1205 17:29:12.181225 4753 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 17:29:12 crc kubenswrapper[4753]: E1205 17:29:12.181540 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-memberlist podName:08625193-514e-494a-b64a-75f345cf14bc nodeName:}" failed. No retries permitted until 2025-12-05 17:29:12.681513317 +0000 UTC m=+1491.184620563 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-memberlist") pod "speaker-jls6g" (UID: "08625193-514e-494a-b64a-75f345cf14bc") : secret "metallb-memberlist" not found Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.185078 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-metrics-certs\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.201069 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.201227 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbqqx\" (UniqueName: \"kubernetes.io/projected/08625193-514e-494a-b64a-75f345cf14bc-kube-api-access-dbqqx\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.208662 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.283819 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/23e6e527-684e-4e11-8470-b4149bb4c6cc-cert\") pod \"controller-f8648f98b-q7lzx\" (UID: \"23e6e527-684e-4e11-8470-b4149bb4c6cc\") " pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.284007 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/23e6e527-684e-4e11-8470-b4149bb4c6cc-metrics-certs\") pod \"controller-f8648f98b-q7lzx\" (UID: \"23e6e527-684e-4e11-8470-b4149bb4c6cc\") " pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.284743 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzj6w\" (UniqueName: \"kubernetes.io/projected/23e6e527-684e-4e11-8470-b4149bb4c6cc-kube-api-access-kzj6w\") pod \"controller-f8648f98b-q7lzx\" (UID: \"23e6e527-684e-4e11-8470-b4149bb4c6cc\") " pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.288359 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/23e6e527-684e-4e11-8470-b4149bb4c6cc-cert\") pod \"controller-f8648f98b-q7lzx\" (UID: \"23e6e527-684e-4e11-8470-b4149bb4c6cc\") " pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.289427 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/23e6e527-684e-4e11-8470-b4149bb4c6cc-metrics-certs\") pod \"controller-f8648f98b-q7lzx\" (UID: \"23e6e527-684e-4e11-8470-b4149bb4c6cc\") " pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.307592 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzj6w\" (UniqueName: \"kubernetes.io/projected/23e6e527-684e-4e11-8470-b4149bb4c6cc-kube-api-access-kzj6w\") pod \"controller-f8648f98b-q7lzx\" (UID: 
\"23e6e527-684e-4e11-8470-b4149bb4c6cc\") " pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.346533 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.478528 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht"] Dec 05 17:29:12 crc kubenswrapper[4753]: W1205 17:29:12.480789 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42fa5f2a_00be_462f_b4f2_35e8b89e8a5e.slice/crio-ed1e1756f41269244fd98b6a6cc27792ff138c6e62f27de8a874bcf72d3bc58c WatchSource:0}: Error finding container ed1e1756f41269244fd98b6a6cc27792ff138c6e62f27de8a874bcf72d3bc58c: Status 404 returned error can't find the container with id ed1e1756f41269244fd98b6a6cc27792ff138c6e62f27de8a874bcf72d3bc58c Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.692599 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-memberlist\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:12 crc kubenswrapper[4753]: E1205 17:29:12.692848 4753 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 17:29:12 crc kubenswrapper[4753]: E1205 17:29:12.692969 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-memberlist podName:08625193-514e-494a-b64a-75f345cf14bc nodeName:}" failed. No retries permitted until 2025-12-05 17:29:13.692941422 +0000 UTC m=+1492.196048428 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-memberlist") pod "speaker-jls6g" (UID: "08625193-514e-494a-b64a-75f345cf14bc") : secret "metallb-memberlist" not found Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.835039 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-q7lzx"] Dec 05 17:29:12 crc kubenswrapper[4753]: W1205 17:29:12.844080 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod23e6e527_684e_4e11_8470_b4149bb4c6cc.slice/crio-d386abc0c11f3f2d4cb3dd3894fd3742208a6187f06ee843fcd9459e5ce9e361 WatchSource:0}: Error finding container d386abc0c11f3f2d4cb3dd3894fd3742208a6187f06ee843fcd9459e5ce9e361: Status 404 returned error can't find the container with id d386abc0c11f3f2d4cb3dd3894fd3742208a6187f06ee843fcd9459e5ce9e361 Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.933381 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" event={"ID":"42fa5f2a-00be-462f-b4f2-35e8b89e8a5e","Type":"ContainerStarted","Data":"ed1e1756f41269244fd98b6a6cc27792ff138c6e62f27de8a874bcf72d3bc58c"} Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.935892 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerStarted","Data":"12b6ce835a9eca62961762dadec839d27a8b77e3f82be5d998393ff210e1013d"} Dec 05 17:29:12 crc kubenswrapper[4753]: I1205 17:29:12.938389 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-q7lzx" event={"ID":"23e6e527-684e-4e11-8470-b4149bb4c6cc","Type":"ContainerStarted","Data":"d386abc0c11f3f2d4cb3dd3894fd3742208a6187f06ee843fcd9459e5ce9e361"} Dec 05 17:29:13 crc kubenswrapper[4753]: I1205 17:29:13.740456 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-memberlist\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:13 crc kubenswrapper[4753]: I1205 17:29:13.757302 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/08625193-514e-494a-b64a-75f345cf14bc-memberlist\") pod \"speaker-jls6g\" (UID: \"08625193-514e-494a-b64a-75f345cf14bc\") " pod="metallb-system/speaker-jls6g" Dec 05 17:29:13 crc kubenswrapper[4753]: I1205 17:29:13.831670 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-jls6g" Dec 05 17:29:13 crc kubenswrapper[4753]: W1205 17:29:13.868659 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod08625193_514e_494a_b64a_75f345cf14bc.slice/crio-859c444b75845df3ca68b81571cb5a99e9c0a934c93fd4efcf9b17b741b28028 WatchSource:0}: Error finding container 859c444b75845df3ca68b81571cb5a99e9c0a934c93fd4efcf9b17b741b28028: Status 404 returned error can't find the container with id 859c444b75845df3ca68b81571cb5a99e9c0a934c93fd4efcf9b17b741b28028 Dec 05 17:29:13 crc kubenswrapper[4753]: I1205 17:29:13.958533 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-q7lzx" event={"ID":"23e6e527-684e-4e11-8470-b4149bb4c6cc","Type":"ContainerStarted","Data":"e103b29f4240d0828312b4a0a2be4053e97d97f1c74b7393c12bebd99a92bbff"} Dec 05 17:29:13 crc kubenswrapper[4753]: I1205 17:29:13.958607 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-q7lzx" event={"ID":"23e6e527-684e-4e11-8470-b4149bb4c6cc","Type":"ContainerStarted","Data":"fec4757d9585d9914d25397ea9773a518bf9466683d54289323ca7d9b7483401"} Dec 05 17:29:13 crc kubenswrapper[4753]: I1205 17:29:13.958756 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:13 crc kubenswrapper[4753]: I1205 17:29:13.960167 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-jls6g" event={"ID":"08625193-514e-494a-b64a-75f345cf14bc","Type":"ContainerStarted","Data":"859c444b75845df3ca68b81571cb5a99e9c0a934c93fd4efcf9b17b741b28028"} Dec 05 17:29:13 crc kubenswrapper[4753]: I1205 17:29:13.986534 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-q7lzx" podStartSLOduration=1.986511196 podStartE2EDuration="1.986511196s" podCreationTimestamp="2025-12-05 17:29:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:29:13.98418063 +0000 UTC m=+1492.487287656" watchObservedRunningTime="2025-12-05 17:29:13.986511196 +0000 UTC m=+1492.489618202" Dec 05 17:29:14 crc kubenswrapper[4753]: I1205 17:29:14.968519 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-jls6g" event={"ID":"08625193-514e-494a-b64a-75f345cf14bc","Type":"ContainerStarted","Data":"f018e56804431d5d1370ec2552a375c2a40959fa64444c5b471b3e12e3841fd9"} Dec 05 17:29:14 crc kubenswrapper[4753]: I1205 17:29:14.969086 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-jls6g" event={"ID":"08625193-514e-494a-b64a-75f345cf14bc","Type":"ContainerStarted","Data":"138dd1334871e2cd30482a7bd887ddfafdebd1703f00ec4da7af7aca35a191d9"} Dec 05 17:29:14 crc kubenswrapper[4753]: I1205 17:29:14.998762 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-jls6g" podStartSLOduration=3.998719133 podStartE2EDuration="3.998719133s" podCreationTimestamp="2025-12-05 17:29:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:29:14.994446523 +0000 UTC m=+1493.497553539" watchObservedRunningTime="2025-12-05 17:29:14.998719133 +0000 UTC m=+1493.501826139" Dec 05 17:29:15 crc kubenswrapper[4753]: I1205 17:29:15.979492 4753 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-jls6g" Dec 05 17:29:21 crc kubenswrapper[4753]: I1205 17:29:21.028931 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" event={"ID":"42fa5f2a-00be-462f-b4f2-35e8b89e8a5e","Type":"ContainerStarted","Data":"27d8dd1309113da18db004402e559e471aa4754771a52f5ae6b39b11bce50330"} Dec 05 17:29:21 crc kubenswrapper[4753]: I1205 17:29:21.029889 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:21 crc kubenswrapper[4753]: I1205 17:29:21.031299 4753 generic.go:334] "Generic (PLEG): container finished" podID="bade1527-dbee-4843-b00d-0a41e7c516d9" containerID="5100c89971b6c0e52b4e9c85aa3eab25845cb027d7d5250de9b1a2eff22b3cfd" exitCode=0 Dec 05 17:29:21 crc kubenswrapper[4753]: I1205 17:29:21.031382 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerDied","Data":"5100c89971b6c0e52b4e9c85aa3eab25845cb027d7d5250de9b1a2eff22b3cfd"} Dec 05 17:29:21 crc kubenswrapper[4753]: I1205 17:29:21.067572 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" podStartSLOduration=2.607220658 podStartE2EDuration="10.067535199s" podCreationTimestamp="2025-12-05 17:29:11 +0000 UTC" firstStartedPulling="2025-12-05 17:29:12.48335371 +0000 UTC m=+1490.986460716" lastFinishedPulling="2025-12-05 17:29:19.943668251 +0000 UTC m=+1498.446775257" observedRunningTime="2025-12-05 17:29:21.05625062 +0000 UTC m=+1499.559357696" watchObservedRunningTime="2025-12-05 17:29:21.067535199 +0000 UTC m=+1499.570642205" Dec 05 17:29:22 crc kubenswrapper[4753]: I1205 17:29:22.048584 4753 generic.go:334] "Generic (PLEG): container finished" podID="bade1527-dbee-4843-b00d-0a41e7c516d9" containerID="be928131d5f90f55b77142f15c681d522da1c83c3fc8dd0027bf7897fea530b2" exitCode=0 Dec 05 17:29:22 crc kubenswrapper[4753]: I1205 17:29:22.048718 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerDied","Data":"be928131d5f90f55b77142f15c681d522da1c83c3fc8dd0027bf7897fea530b2"} Dec 05 17:29:22 crc kubenswrapper[4753]: I1205 17:29:22.352304 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-q7lzx" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.064906 4753 generic.go:334] "Generic (PLEG): container finished" podID="bade1527-dbee-4843-b00d-0a41e7c516d9" containerID="b3327733e73e60a4bfffce7d013a06a967f4b30ecfcef09e094c77573f5150c9" exitCode=0 Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.065044 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerDied","Data":"b3327733e73e60a4bfffce7d013a06a967f4b30ecfcef09e094c77573f5150c9"} Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.080853 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-6vjs8"] Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.084996 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.093909 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6vjs8"] Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.221929 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhl82\" (UniqueName: \"kubernetes.io/projected/166af0a2-ee5e-45ea-afcb-9ae199303ed0-kube-api-access-vhl82\") pod \"community-operators-6vjs8\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.222000 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-catalog-content\") pod \"community-operators-6vjs8\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.222037 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-utilities\") pod \"community-operators-6vjs8\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.324699 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhl82\" (UniqueName: \"kubernetes.io/projected/166af0a2-ee5e-45ea-afcb-9ae199303ed0-kube-api-access-vhl82\") pod \"community-operators-6vjs8\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.325106 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-catalog-content\") pod \"community-operators-6vjs8\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.325134 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-utilities\") pod \"community-operators-6vjs8\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.325649 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-catalog-content\") pod \"community-operators-6vjs8\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.325678 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-utilities\") pod \"community-operators-6vjs8\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.361211 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vhl82\" (UniqueName: \"kubernetes.io/projected/166af0a2-ee5e-45ea-afcb-9ae199303ed0-kube-api-access-vhl82\") pod \"community-operators-6vjs8\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:23 crc kubenswrapper[4753]: I1205 17:29:23.478287 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:24 crc kubenswrapper[4753]: I1205 17:29:24.036422 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6vjs8"] Dec 05 17:29:24 crc kubenswrapper[4753]: I1205 17:29:24.082717 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerStarted","Data":"e0e24f033e9a4931765e6e62738fce0cec1a22873842afa5d2ff3e0d38fc376e"} Dec 05 17:29:24 crc kubenswrapper[4753]: I1205 17:29:24.082777 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerStarted","Data":"93b9661cc564b4a090ecdf6bab99b6aa58d66bcd748ecfe8ec621119d14727c4"} Dec 05 17:29:24 crc kubenswrapper[4753]: I1205 17:29:24.082789 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerStarted","Data":"f13dd48f7ddb5af042a2afdd57e68bbc174d1993512689b7e7c272200d5d2016"} Dec 05 17:29:24 crc kubenswrapper[4753]: I1205 17:29:24.082800 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerStarted","Data":"c8b77188c12a1d0085e0b93d8d3fefd813806b65d4026acde94747bf84f2717c"} Dec 05 17:29:24 crc kubenswrapper[4753]: I1205 17:29:24.083928 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vjs8" event={"ID":"166af0a2-ee5e-45ea-afcb-9ae199303ed0","Type":"ContainerStarted","Data":"f89f52546b06a3bae8769298f8baa55f5dbba6775ead4ba919b7d947e419e288"} Dec 05 17:29:25 crc kubenswrapper[4753]: I1205 17:29:25.093859 4753 generic.go:334] "Generic (PLEG): container finished" podID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerID="176b4b342653c558dbeba778f3642635c5fbad4fc535aae129ec7acf8aa39f8f" exitCode=0 Dec 05 17:29:25 crc kubenswrapper[4753]: I1205 17:29:25.093977 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vjs8" event={"ID":"166af0a2-ee5e-45ea-afcb-9ae199303ed0","Type":"ContainerDied","Data":"176b4b342653c558dbeba778f3642635c5fbad4fc535aae129ec7acf8aa39f8f"} Dec 05 17:29:25 crc kubenswrapper[4753]: I1205 17:29:25.100565 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerStarted","Data":"6c785a28ba8098cbed779177e9b4972469da18e339946effe77fb8f8188ed5a3"} Dec 05 17:29:25 crc kubenswrapper[4753]: I1205 17:29:25.100632 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-2gprd" event={"ID":"bade1527-dbee-4843-b00d-0a41e7c516d9","Type":"ContainerStarted","Data":"60c63dafabe06c93daf62265bebdd636452b075459ff721013daee5c76c69fc8"} Dec 05 17:29:25 crc kubenswrapper[4753]: I1205 17:29:25.100978 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:25 crc kubenswrapper[4753]: I1205 17:29:25.149734 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-2gprd" podStartSLOduration=6.553667703 podStartE2EDuration="14.149706632s" podCreationTimestamp="2025-12-05 17:29:11 +0000 UTC" firstStartedPulling="2025-12-05 17:29:12.37875108 +0000 UTC m=+1490.881858096" lastFinishedPulling="2025-12-05 17:29:19.974790019 +0000 UTC m=+1498.477897025" observedRunningTime="2025-12-05 17:29:25.142360815 +0000 UTC m=+1503.645467831" watchObservedRunningTime="2025-12-05 17:29:25.149706632 +0000 UTC m=+1503.652813648" Dec 05 17:29:26 crc kubenswrapper[4753]: I1205 17:29:26.110057 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vjs8" event={"ID":"166af0a2-ee5e-45ea-afcb-9ae199303ed0","Type":"ContainerStarted","Data":"c4f346e61666ebd8ba20f77dc52efe5fce74c4ebed145eabfd023f55546f5b5b"} Dec 05 17:29:27 crc kubenswrapper[4753]: I1205 17:29:27.120764 4753 generic.go:334] "Generic (PLEG): container finished" podID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerID="c4f346e61666ebd8ba20f77dc52efe5fce74c4ebed145eabfd023f55546f5b5b" exitCode=0 Dec 05 17:29:27 crc kubenswrapper[4753]: I1205 17:29:27.120856 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vjs8" event={"ID":"166af0a2-ee5e-45ea-afcb-9ae199303ed0","Type":"ContainerDied","Data":"c4f346e61666ebd8ba20f77dc52efe5fce74c4ebed145eabfd023f55546f5b5b"} Dec 05 17:29:27 crc kubenswrapper[4753]: I1205 17:29:27.202737 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:27 crc kubenswrapper[4753]: I1205 17:29:27.262990 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:28 crc kubenswrapper[4753]: I1205 17:29:28.134348 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vjs8" event={"ID":"166af0a2-ee5e-45ea-afcb-9ae199303ed0","Type":"ContainerStarted","Data":"5db82970995af8fa30fab6127f219cbf516785217639017bbff9f863aedc01b4"} Dec 05 17:29:28 crc kubenswrapper[4753]: I1205 17:29:28.161909 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6vjs8" podStartSLOduration=2.7494577380000003 podStartE2EDuration="5.161884257s" podCreationTimestamp="2025-12-05 17:29:23 +0000 UTC" firstStartedPulling="2025-12-05 17:29:25.096403809 +0000 UTC m=+1503.599510815" lastFinishedPulling="2025-12-05 17:29:27.508830328 +0000 UTC m=+1506.011937334" observedRunningTime="2025-12-05 17:29:28.1548889 +0000 UTC m=+1506.657995936" watchObservedRunningTime="2025-12-05 17:29:28.161884257 +0000 UTC m=+1506.664991263" Dec 05 17:29:32 crc kubenswrapper[4753]: I1205 17:29:32.215267 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-hn9ht" Dec 05 17:29:33 crc kubenswrapper[4753]: I1205 17:29:33.479435 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:33 crc kubenswrapper[4753]: I1205 17:29:33.479592 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:33 crc kubenswrapper[4753]: I1205 17:29:33.532605 4753 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:33 crc kubenswrapper[4753]: I1205 17:29:33.840793 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-jls6g" Dec 05 17:29:34 crc kubenswrapper[4753]: I1205 17:29:34.251647 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:34 crc kubenswrapper[4753]: I1205 17:29:34.312124 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6vjs8"] Dec 05 17:29:36 crc kubenswrapper[4753]: I1205 17:29:36.202732 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-6vjs8" podUID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerName="registry-server" containerID="cri-o://5db82970995af8fa30fab6127f219cbf516785217639017bbff9f863aedc01b4" gracePeriod=2 Dec 05 17:29:36 crc kubenswrapper[4753]: I1205 17:29:36.704039 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-4bfkp"] Dec 05 17:29:36 crc kubenswrapper[4753]: I1205 17:29:36.705476 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4bfkp" Dec 05 17:29:36 crc kubenswrapper[4753]: I1205 17:29:36.709240 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 17:29:36 crc kubenswrapper[4753]: I1205 17:29:36.709489 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-qw8bj" Dec 05 17:29:36 crc kubenswrapper[4753]: I1205 17:29:36.710209 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 17:29:36 crc kubenswrapper[4753]: I1205 17:29:36.742912 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4bfkp"] Dec 05 17:29:36 crc kubenswrapper[4753]: I1205 17:29:36.867736 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxjmw\" (UniqueName: \"kubernetes.io/projected/5d35d519-eb52-45f6-9a2d-e9c5353920a3-kube-api-access-wxjmw\") pod \"openstack-operator-index-4bfkp\" (UID: \"5d35d519-eb52-45f6-9a2d-e9c5353920a3\") " pod="openstack-operators/openstack-operator-index-4bfkp" Dec 05 17:29:36 crc kubenswrapper[4753]: I1205 17:29:36.969349 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxjmw\" (UniqueName: \"kubernetes.io/projected/5d35d519-eb52-45f6-9a2d-e9c5353920a3-kube-api-access-wxjmw\") pod \"openstack-operator-index-4bfkp\" (UID: \"5d35d519-eb52-45f6-9a2d-e9c5353920a3\") " pod="openstack-operators/openstack-operator-index-4bfkp" Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.000711 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxjmw\" (UniqueName: \"kubernetes.io/projected/5d35d519-eb52-45f6-9a2d-e9c5353920a3-kube-api-access-wxjmw\") pod \"openstack-operator-index-4bfkp\" (UID: \"5d35d519-eb52-45f6-9a2d-e9c5353920a3\") " pod="openstack-operators/openstack-operator-index-4bfkp" Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.031204 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4bfkp" Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.220427 4753 generic.go:334] "Generic (PLEG): container finished" podID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerID="5db82970995af8fa30fab6127f219cbf516785217639017bbff9f863aedc01b4" exitCode=0 Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.220510 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vjs8" event={"ID":"166af0a2-ee5e-45ea-afcb-9ae199303ed0","Type":"ContainerDied","Data":"5db82970995af8fa30fab6127f219cbf516785217639017bbff9f863aedc01b4"} Dec 05 17:29:37 crc kubenswrapper[4753]: W1205 17:29:37.517784 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5d35d519_eb52_45f6_9a2d_e9c5353920a3.slice/crio-65fecf83a75ce837503d414f4ab50523163402aaf5cb12538b846e0ebbabe005 WatchSource:0}: Error finding container 65fecf83a75ce837503d414f4ab50523163402aaf5cb12538b846e0ebbabe005: Status 404 returned error can't find the container with id 65fecf83a75ce837503d414f4ab50523163402aaf5cb12538b846e0ebbabe005 Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.524627 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-4bfkp"] Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.753570 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.887494 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhl82\" (UniqueName: \"kubernetes.io/projected/166af0a2-ee5e-45ea-afcb-9ae199303ed0-kube-api-access-vhl82\") pod \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.888144 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-utilities\") pod \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.888243 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-catalog-content\") pod \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\" (UID: \"166af0a2-ee5e-45ea-afcb-9ae199303ed0\") " Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.889360 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-utilities" (OuterVolumeSpecName: "utilities") pod "166af0a2-ee5e-45ea-afcb-9ae199303ed0" (UID: "166af0a2-ee5e-45ea-afcb-9ae199303ed0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.903810 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/166af0a2-ee5e-45ea-afcb-9ae199303ed0-kube-api-access-vhl82" (OuterVolumeSpecName: "kube-api-access-vhl82") pod "166af0a2-ee5e-45ea-afcb-9ae199303ed0" (UID: "166af0a2-ee5e-45ea-afcb-9ae199303ed0"). InnerVolumeSpecName "kube-api-access-vhl82". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.966033 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "166af0a2-ee5e-45ea-afcb-9ae199303ed0" (UID: "166af0a2-ee5e-45ea-afcb-9ae199303ed0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.990729 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhl82\" (UniqueName: \"kubernetes.io/projected/166af0a2-ee5e-45ea-afcb-9ae199303ed0-kube-api-access-vhl82\") on node \"crc\" DevicePath \"\"" Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.990775 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:29:37 crc kubenswrapper[4753]: I1205 17:29:37.990789 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/166af0a2-ee5e-45ea-afcb-9ae199303ed0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:29:38 crc kubenswrapper[4753]: I1205 17:29:38.251114 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6vjs8" event={"ID":"166af0a2-ee5e-45ea-afcb-9ae199303ed0","Type":"ContainerDied","Data":"f89f52546b06a3bae8769298f8baa55f5dbba6775ead4ba919b7d947e419e288"} Dec 05 17:29:38 crc kubenswrapper[4753]: I1205 17:29:38.251202 4753 scope.go:117] "RemoveContainer" containerID="5db82970995af8fa30fab6127f219cbf516785217639017bbff9f863aedc01b4" Dec 05 17:29:38 crc kubenswrapper[4753]: I1205 17:29:38.251351 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6vjs8" Dec 05 17:29:38 crc kubenswrapper[4753]: I1205 17:29:38.255074 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4bfkp" event={"ID":"5d35d519-eb52-45f6-9a2d-e9c5353920a3","Type":"ContainerStarted","Data":"65fecf83a75ce837503d414f4ab50523163402aaf5cb12538b846e0ebbabe005"} Dec 05 17:29:38 crc kubenswrapper[4753]: I1205 17:29:38.292717 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6vjs8"] Dec 05 17:29:38 crc kubenswrapper[4753]: I1205 17:29:38.298387 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6vjs8"] Dec 05 17:29:38 crc kubenswrapper[4753]: I1205 17:29:38.399182 4753 scope.go:117] "RemoveContainer" containerID="c4f346e61666ebd8ba20f77dc52efe5fce74c4ebed145eabfd023f55546f5b5b" Dec 05 17:29:38 crc kubenswrapper[4753]: I1205 17:29:38.615062 4753 scope.go:117] "RemoveContainer" containerID="176b4b342653c558dbeba778f3642635c5fbad4fc535aae129ec7acf8aa39f8f" Dec 05 17:29:39 crc kubenswrapper[4753]: I1205 17:29:39.732047 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" path="/var/lib/kubelet/pods/166af0a2-ee5e-45ea-afcb-9ae199303ed0/volumes" Dec 05 17:29:40 crc kubenswrapper[4753]: I1205 17:29:40.278819 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4bfkp" event={"ID":"5d35d519-eb52-45f6-9a2d-e9c5353920a3","Type":"ContainerStarted","Data":"04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27"} Dec 05 17:29:40 crc kubenswrapper[4753]: I1205 17:29:40.311036 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-4bfkp" podStartSLOduration=1.948127486 podStartE2EDuration="4.310998118s" podCreationTimestamp="2025-12-05 17:29:36 +0000 UTC" firstStartedPulling="2025-12-05 17:29:37.521240726 +0000 UTC m=+1516.024347742" lastFinishedPulling="2025-12-05 17:29:39.884111368 +0000 UTC m=+1518.387218374" observedRunningTime="2025-12-05 17:29:40.29972425 +0000 UTC m=+1518.802831296" watchObservedRunningTime="2025-12-05 17:29:40.310998118 +0000 UTC m=+1518.814105164" Dec 05 17:29:40 crc kubenswrapper[4753]: I1205 17:29:40.788562 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-4bfkp"] Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.590471 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-5blfl"] Dec 05 17:29:41 crc kubenswrapper[4753]: E1205 17:29:41.592615 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerName="extract-utilities" Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.592642 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerName="extract-utilities" Dec 05 17:29:41 crc kubenswrapper[4753]: E1205 17:29:41.592652 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerName="registry-server" Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.592660 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerName="registry-server" Dec 05 17:29:41 crc kubenswrapper[4753]: E1205 17:29:41.592668 4753 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerName="extract-content" Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.592678 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerName="extract-content" Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.592815 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="166af0a2-ee5e-45ea-afcb-9ae199303ed0" containerName="registry-server" Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.593543 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5blfl" Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.607874 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5blfl"] Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.763697 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m5sr\" (UniqueName: \"kubernetes.io/projected/46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f-kube-api-access-5m5sr\") pod \"openstack-operator-index-5blfl\" (UID: \"46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f\") " pod="openstack-operators/openstack-operator-index-5blfl" Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.865722 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m5sr\" (UniqueName: \"kubernetes.io/projected/46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f-kube-api-access-5m5sr\") pod \"openstack-operator-index-5blfl\" (UID: \"46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f\") " pod="openstack-operators/openstack-operator-index-5blfl" Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.900678 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m5sr\" (UniqueName: \"kubernetes.io/projected/46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f-kube-api-access-5m5sr\") pod \"openstack-operator-index-5blfl\" (UID: \"46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f\") " pod="openstack-operators/openstack-operator-index-5blfl" Dec 05 17:29:41 crc kubenswrapper[4753]: I1205 17:29:41.929522 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-5blfl" Dec 05 17:29:42 crc kubenswrapper[4753]: I1205 17:29:42.204264 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-2gprd" Dec 05 17:29:42 crc kubenswrapper[4753]: I1205 17:29:42.295549 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-4bfkp" podUID="5d35d519-eb52-45f6-9a2d-e9c5353920a3" containerName="registry-server" containerID="cri-o://04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27" gracePeriod=2 Dec 05 17:29:42 crc kubenswrapper[4753]: I1205 17:29:42.419518 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5blfl"] Dec 05 17:29:42 crc kubenswrapper[4753]: W1205 17:29:42.425006 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46c0a8e5_1a5d_4fd6_bf89_a91826a0b99f.slice/crio-5043be9df0c9822decbbac47e82ef1acf9f21ad3bd7cc8df81dbaebe614ddde3 WatchSource:0}: Error finding container 5043be9df0c9822decbbac47e82ef1acf9f21ad3bd7cc8df81dbaebe614ddde3: Status 404 returned error can't find the container with id 5043be9df0c9822decbbac47e82ef1acf9f21ad3bd7cc8df81dbaebe614ddde3 Dec 05 17:29:42 crc kubenswrapper[4753]: I1205 17:29:42.656930 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-4bfkp" Dec 05 17:29:42 crc kubenswrapper[4753]: I1205 17:29:42.702576 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxjmw\" (UniqueName: \"kubernetes.io/projected/5d35d519-eb52-45f6-9a2d-e9c5353920a3-kube-api-access-wxjmw\") pod \"5d35d519-eb52-45f6-9a2d-e9c5353920a3\" (UID: \"5d35d519-eb52-45f6-9a2d-e9c5353920a3\") " Dec 05 17:29:42 crc kubenswrapper[4753]: I1205 17:29:42.710751 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d35d519-eb52-45f6-9a2d-e9c5353920a3-kube-api-access-wxjmw" (OuterVolumeSpecName: "kube-api-access-wxjmw") pod "5d35d519-eb52-45f6-9a2d-e9c5353920a3" (UID: "5d35d519-eb52-45f6-9a2d-e9c5353920a3"). InnerVolumeSpecName "kube-api-access-wxjmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:29:42 crc kubenswrapper[4753]: I1205 17:29:42.805517 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxjmw\" (UniqueName: \"kubernetes.io/projected/5d35d519-eb52-45f6-9a2d-e9c5353920a3-kube-api-access-wxjmw\") on node \"crc\" DevicePath \"\"" Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.306725 4753 generic.go:334] "Generic (PLEG): container finished" podID="5d35d519-eb52-45f6-9a2d-e9c5353920a3" containerID="04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27" exitCode=0 Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.306787 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-4bfkp" Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.307713 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4bfkp" event={"ID":"5d35d519-eb52-45f6-9a2d-e9c5353920a3","Type":"ContainerDied","Data":"04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27"} Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.307781 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-4bfkp" event={"ID":"5d35d519-eb52-45f6-9a2d-e9c5353920a3","Type":"ContainerDied","Data":"65fecf83a75ce837503d414f4ab50523163402aaf5cb12538b846e0ebbabe005"} Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.307816 4753 scope.go:117] "RemoveContainer" containerID="04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27" Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.310378 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5blfl" event={"ID":"46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f","Type":"ContainerStarted","Data":"bf8b3b3c1117a50f7a344121acab9a4f6c23190b637fdd0f2ea9cf0ea874eb60"} Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.310509 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5blfl" event={"ID":"46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f","Type":"ContainerStarted","Data":"5043be9df0c9822decbbac47e82ef1acf9f21ad3bd7cc8df81dbaebe614ddde3"} Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.353347 4753 scope.go:117] "RemoveContainer" containerID="04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27" Dec 05 17:29:43 crc kubenswrapper[4753]: E1205 17:29:43.354214 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27\": container with ID starting with 04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27 not found: ID does not exist" containerID="04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27" Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.354262 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27"} err="failed to get container status \"04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27\": rpc error: code = NotFound desc = could not find container \"04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27\": container with ID starting with 04afd5539928cf23a7d4493ba0633e9c2601ee17ac4f24f21b48a27d5ba38e27 not found: ID does not exist" Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.361522 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-5blfl" podStartSLOduration=2.309798256 podStartE2EDuration="2.361447032s" podCreationTimestamp="2025-12-05 17:29:41 +0000 UTC" firstStartedPulling="2025-12-05 17:29:42.430125316 +0000 UTC m=+1520.933232322" lastFinishedPulling="2025-12-05 17:29:42.481774092 +0000 UTC m=+1520.984881098" observedRunningTime="2025-12-05 17:29:43.340567463 +0000 UTC m=+1521.843674469" watchObservedRunningTime="2025-12-05 17:29:43.361447032 +0000 UTC m=+1521.864554108" Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.374586 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-operators/openstack-operator-index-4bfkp"] Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.380845 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-4bfkp"] Dec 05 17:29:43 crc kubenswrapper[4753]: I1205 17:29:43.728288 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d35d519-eb52-45f6-9a2d-e9c5353920a3" path="/var/lib/kubelet/pods/5d35d519-eb52-45f6-9a2d-e9c5353920a3/volumes" Dec 05 17:29:51 crc kubenswrapper[4753]: I1205 17:29:51.930584 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-5blfl" Dec 05 17:29:51 crc kubenswrapper[4753]: I1205 17:29:51.931731 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-5blfl" Dec 05 17:29:51 crc kubenswrapper[4753]: I1205 17:29:51.985310 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-5blfl" Dec 05 17:29:52 crc kubenswrapper[4753]: I1205 17:29:52.375718 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-5blfl" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.617528 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq"] Dec 05 17:29:54 crc kubenswrapper[4753]: E1205 17:29:54.618474 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d35d519-eb52-45f6-9a2d-e9c5353920a3" containerName="registry-server" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.618548 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d35d519-eb52-45f6-9a2d-e9c5353920a3" containerName="registry-server" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.618773 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d35d519-eb52-45f6-9a2d-e9c5353920a3" containerName="registry-server" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.620112 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.622664 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-bntst" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.631277 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq"] Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.802534 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-th8jw\" (UniqueName: \"kubernetes.io/projected/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-kube-api-access-th8jw\") pod \"10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.802991 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-bundle\") pod \"10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.803032 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-util\") pod \"10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.904547 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-th8jw\" (UniqueName: \"kubernetes.io/projected/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-kube-api-access-th8jw\") pod \"10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.904671 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-bundle\") pod \"10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.904715 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-util\") pod \"10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.905275 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-util\") pod \"10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.905841 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-bundle\") pod \"10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.945440 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-th8jw\" (UniqueName: \"kubernetes.io/projected/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-kube-api-access-th8jw\") pod \"10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:54 crc kubenswrapper[4753]: I1205 17:29:54.959619 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:29:55 crc kubenswrapper[4753]: I1205 17:29:55.408775 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq"] Dec 05 17:29:56 crc kubenswrapper[4753]: I1205 17:29:56.369234 4753 generic.go:334] "Generic (PLEG): container finished" podID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerID="6836913b39b8f2b8e05fe331145b96ad63f37e9841a6c3b283632a7ad85eff6f" exitCode=0 Dec 05 17:29:56 crc kubenswrapper[4753]: I1205 17:29:56.369323 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" event={"ID":"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69","Type":"ContainerDied","Data":"6836913b39b8f2b8e05fe331145b96ad63f37e9841a6c3b283632a7ad85eff6f"} Dec 05 17:29:56 crc kubenswrapper[4753]: I1205 17:29:56.370756 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" event={"ID":"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69","Type":"ContainerStarted","Data":"43bf9b132f365eec7c66f8e4a23be0909e6e85087998119a818cd61a3b3a7598"} Dec 05 17:29:57 crc kubenswrapper[4753]: I1205 17:29:57.382441 4753 generic.go:334] "Generic (PLEG): container finished" podID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerID="67eef4133e163e589d6db7850aa32964d660da61c8a6f76508c4d77a59e9def3" exitCode=0 Dec 05 17:29:57 crc kubenswrapper[4753]: I1205 17:29:57.382546 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" event={"ID":"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69","Type":"ContainerDied","Data":"67eef4133e163e589d6db7850aa32964d660da61c8a6f76508c4d77a59e9def3"} Dec 05 17:29:58 crc kubenswrapper[4753]: I1205 17:29:58.393855 4753 generic.go:334] "Generic (PLEG): container finished" podID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerID="1ba7cdd982bc8cc4b2e21fbc1bcc0f2cbac607ccc82a72b71ddb6acedfdb5333" exitCode=0 Dec 05 17:29:58 crc kubenswrapper[4753]: I1205 17:29:58.393921 4753 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" event={"ID":"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69","Type":"ContainerDied","Data":"1ba7cdd982bc8cc4b2e21fbc1bcc0f2cbac607ccc82a72b71ddb6acedfdb5333"} Dec 05 17:29:59 crc kubenswrapper[4753]: I1205 17:29:59.826810 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.002835 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-bundle\") pod \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.002889 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-th8jw\" (UniqueName: \"kubernetes.io/projected/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-kube-api-access-th8jw\") pod \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.002920 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-util\") pod \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\" (UID: \"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69\") " Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.004585 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-bundle" (OuterVolumeSpecName: "bundle") pod "e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" (UID: "e51268e8-feb1-4dcd-8eeb-4e81cc1ced69"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.017524 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-util" (OuterVolumeSpecName: "util") pod "e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" (UID: "e51268e8-feb1-4dcd-8eeb-4e81cc1ced69"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.018317 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-kube-api-access-th8jw" (OuterVolumeSpecName: "kube-api-access-th8jw") pod "e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" (UID: "e51268e8-feb1-4dcd-8eeb-4e81cc1ced69"). InnerVolumeSpecName "kube-api-access-th8jw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.105736 4753 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.105821 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-th8jw\" (UniqueName: \"kubernetes.io/projected/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-kube-api-access-th8jw\") on node \"crc\" DevicePath \"\"" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.105845 4753 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e51268e8-feb1-4dcd-8eeb-4e81cc1ced69-util\") on node \"crc\" DevicePath \"\"" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.160784 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll"] Dec 05 17:30:00 crc kubenswrapper[4753]: E1205 17:30:00.161355 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerName="pull" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.161381 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerName="pull" Dec 05 17:30:00 crc kubenswrapper[4753]: E1205 17:30:00.161421 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerName="util" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.161434 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerName="util" Dec 05 17:30:00 crc kubenswrapper[4753]: E1205 17:30:00.161453 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerName="extract" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.161466 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerName="extract" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.161647 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="e51268e8-feb1-4dcd-8eeb-4e81cc1ced69" containerName="extract" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.162574 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.165210 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.165346 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.173235 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll"] Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.309824 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0eaa216f-034c-4be1-9041-706032f78479-secret-volume\") pod \"collect-profiles-29415930-t9nll\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.311440 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0eaa216f-034c-4be1-9041-706032f78479-config-volume\") pod \"collect-profiles-29415930-t9nll\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.311662 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clft7\" (UniqueName: \"kubernetes.io/projected/0eaa216f-034c-4be1-9041-706032f78479-kube-api-access-clft7\") pod \"collect-profiles-29415930-t9nll\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.413280 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0eaa216f-034c-4be1-9041-706032f78479-config-volume\") pod \"collect-profiles-29415930-t9nll\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.413358 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clft7\" (UniqueName: \"kubernetes.io/projected/0eaa216f-034c-4be1-9041-706032f78479-kube-api-access-clft7\") pod \"collect-profiles-29415930-t9nll\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.413429 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0eaa216f-034c-4be1-9041-706032f78479-secret-volume\") pod \"collect-profiles-29415930-t9nll\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.415325 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0eaa216f-034c-4be1-9041-706032f78479-config-volume\") pod 
\"collect-profiles-29415930-t9nll\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.417216 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" event={"ID":"e51268e8-feb1-4dcd-8eeb-4e81cc1ced69","Type":"ContainerDied","Data":"43bf9b132f365eec7c66f8e4a23be0909e6e85087998119a818cd61a3b3a7598"} Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.417303 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43bf9b132f365eec7c66f8e4a23be0909e6e85087998119a818cd61a3b3a7598" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.417656 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.420871 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0eaa216f-034c-4be1-9041-706032f78479-secret-volume\") pod \"collect-profiles-29415930-t9nll\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.434578 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clft7\" (UniqueName: \"kubernetes.io/projected/0eaa216f-034c-4be1-9041-706032f78479-kube-api-access-clft7\") pod \"collect-profiles-29415930-t9nll\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.488371 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:00 crc kubenswrapper[4753]: I1205 17:30:00.988094 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll"] Dec 05 17:30:01 crc kubenswrapper[4753]: W1205 17:30:01.000215 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0eaa216f_034c_4be1_9041_706032f78479.slice/crio-fb811c45fc7b8a1a4164ca397fefa60c6e5e3e7d7b5f5fdc9f9af348331be5ee WatchSource:0}: Error finding container fb811c45fc7b8a1a4164ca397fefa60c6e5e3e7d7b5f5fdc9f9af348331be5ee: Status 404 returned error can't find the container with id fb811c45fc7b8a1a4164ca397fefa60c6e5e3e7d7b5f5fdc9f9af348331be5ee Dec 05 17:30:01 crc kubenswrapper[4753]: I1205 17:30:01.440209 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" event={"ID":"0eaa216f-034c-4be1-9041-706032f78479","Type":"ContainerStarted","Data":"05debfed71e08e74e588ce61c0f8cfb802bc9e0de5d319bf1874d396028785f9"} Dec 05 17:30:01 crc kubenswrapper[4753]: I1205 17:30:01.440277 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" event={"ID":"0eaa216f-034c-4be1-9041-706032f78479","Type":"ContainerStarted","Data":"fb811c45fc7b8a1a4164ca397fefa60c6e5e3e7d7b5f5fdc9f9af348331be5ee"} Dec 05 17:30:01 crc kubenswrapper[4753]: I1205 17:30:01.476380 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" podStartSLOduration=1.476362524 podStartE2EDuration="1.476362524s" podCreationTimestamp="2025-12-05 17:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:30:01.475020567 +0000 UTC m=+1539.978127573" watchObservedRunningTime="2025-12-05 17:30:01.476362524 +0000 UTC m=+1539.979469520" Dec 05 17:30:02 crc kubenswrapper[4753]: I1205 17:30:02.449793 4753 generic.go:334] "Generic (PLEG): container finished" podID="0eaa216f-034c-4be1-9041-706032f78479" containerID="05debfed71e08e74e588ce61c0f8cfb802bc9e0de5d319bf1874d396028785f9" exitCode=0 Dec 05 17:30:02 crc kubenswrapper[4753]: I1205 17:30:02.450337 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" event={"ID":"0eaa216f-034c-4be1-9041-706032f78479","Type":"ContainerDied","Data":"05debfed71e08e74e588ce61c0f8cfb802bc9e0de5d319bf1874d396028785f9"} Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.617362 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf"] Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.619091 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.631918 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-259wg" Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.652157 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf"] Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.773426 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk2gk\" (UniqueName: \"kubernetes.io/projected/a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff-kube-api-access-tk2gk\") pod \"openstack-operator-controller-operator-5958697dc4-d8ztf\" (UID: \"a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff\") " pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.871840 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.875431 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk2gk\" (UniqueName: \"kubernetes.io/projected/a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff-kube-api-access-tk2gk\") pod \"openstack-operator-controller-operator-5958697dc4-d8ztf\" (UID: \"a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff\") " pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.897966 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk2gk\" (UniqueName: \"kubernetes.io/projected/a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff-kube-api-access-tk2gk\") pod \"openstack-operator-controller-operator-5958697dc4-d8ztf\" (UID: \"a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff\") " pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.954995 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.978867 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0eaa216f-034c-4be1-9041-706032f78479-secret-volume\") pod \"0eaa216f-034c-4be1-9041-706032f78479\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.978973 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0eaa216f-034c-4be1-9041-706032f78479-config-volume\") pod \"0eaa216f-034c-4be1-9041-706032f78479\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.979077 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-clft7\" (UniqueName: \"kubernetes.io/projected/0eaa216f-034c-4be1-9041-706032f78479-kube-api-access-clft7\") pod \"0eaa216f-034c-4be1-9041-706032f78479\" (UID: \"0eaa216f-034c-4be1-9041-706032f78479\") " Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.981572 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0eaa216f-034c-4be1-9041-706032f78479-config-volume" (OuterVolumeSpecName: "config-volume") pod "0eaa216f-034c-4be1-9041-706032f78479" (UID: "0eaa216f-034c-4be1-9041-706032f78479"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.983242 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0eaa216f-034c-4be1-9041-706032f78479-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "0eaa216f-034c-4be1-9041-706032f78479" (UID: "0eaa216f-034c-4be1-9041-706032f78479"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:30:03 crc kubenswrapper[4753]: I1205 17:30:03.983313 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eaa216f-034c-4be1-9041-706032f78479-kube-api-access-clft7" (OuterVolumeSpecName: "kube-api-access-clft7") pod "0eaa216f-034c-4be1-9041-706032f78479" (UID: "0eaa216f-034c-4be1-9041-706032f78479"). InnerVolumeSpecName "kube-api-access-clft7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:30:04 crc kubenswrapper[4753]: I1205 17:30:04.080814 4753 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/0eaa216f-034c-4be1-9041-706032f78479-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:30:04 crc kubenswrapper[4753]: I1205 17:30:04.080849 4753 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0eaa216f-034c-4be1-9041-706032f78479-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:30:04 crc kubenswrapper[4753]: I1205 17:30:04.080861 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-clft7\" (UniqueName: \"kubernetes.io/projected/0eaa216f-034c-4be1-9041-706032f78479-kube-api-access-clft7\") on node \"crc\" DevicePath \"\"" Dec 05 17:30:04 crc kubenswrapper[4753]: I1205 17:30:04.248032 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf"] Dec 05 17:30:04 crc kubenswrapper[4753]: I1205 17:30:04.503336 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" event={"ID":"a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff","Type":"ContainerStarted","Data":"21bc412efa91a1d838086a8b9e9714210fca9cb36974a28bae06075ea153d20c"} Dec 05 17:30:04 crc kubenswrapper[4753]: I1205 17:30:04.531459 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" event={"ID":"0eaa216f-034c-4be1-9041-706032f78479","Type":"ContainerDied","Data":"fb811c45fc7b8a1a4164ca397fefa60c6e5e3e7d7b5f5fdc9f9af348331be5ee"} Dec 05 17:30:04 crc kubenswrapper[4753]: I1205 17:30:04.531522 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb811c45fc7b8a1a4164ca397fefa60c6e5e3e7d7b5f5fdc9f9af348331be5ee" Dec 05 17:30:04 crc kubenswrapper[4753]: I1205 17:30:04.531870 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll" Dec 05 17:30:09 crc kubenswrapper[4753]: I1205 17:30:09.573962 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" event={"ID":"a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff","Type":"ContainerStarted","Data":"8704c0d2ab1b06f8f660fb2d943d20c4480e962fe0ad7f6d61050f1232259b50"} Dec 05 17:30:09 crc kubenswrapper[4753]: I1205 17:30:09.574825 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" Dec 05 17:30:09 crc kubenswrapper[4753]: I1205 17:30:09.610324 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" podStartSLOduration=2.122923043 podStartE2EDuration="6.610294834s" podCreationTimestamp="2025-12-05 17:30:03 +0000 UTC" firstStartedPulling="2025-12-05 17:30:04.25027294 +0000 UTC m=+1542.753379946" lastFinishedPulling="2025-12-05 17:30:08.737644731 +0000 UTC m=+1547.240751737" observedRunningTime="2025-12-05 17:30:09.606711363 +0000 UTC m=+1548.109818369" watchObservedRunningTime="2025-12-05 17:30:09.610294834 +0000 UTC m=+1548.113401840" Dec 05 17:30:13 crc kubenswrapper[4753]: I1205 17:30:13.960101 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-5958697dc4-d8ztf" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.388440 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m5k72"] Dec 05 17:30:25 crc kubenswrapper[4753]: E1205 17:30:25.389871 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eaa216f-034c-4be1-9041-706032f78479" containerName="collect-profiles" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.389892 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eaa216f-034c-4be1-9041-706032f78479" containerName="collect-profiles" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.390073 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eaa216f-034c-4be1-9041-706032f78479" containerName="collect-profiles" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.391530 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.408320 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m5k72"] Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.566456 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-utilities\") pod \"certified-operators-m5k72\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") " pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.566612 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdg4z\" (UniqueName: \"kubernetes.io/projected/df9c3114-8e90-4129-9a46-a99986adccd2-kube-api-access-mdg4z\") pod \"certified-operators-m5k72\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") " pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.566677 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-catalog-content\") pod \"certified-operators-m5k72\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") " pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.667862 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-catalog-content\") pod \"certified-operators-m5k72\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") " pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.667942 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-utilities\") pod \"certified-operators-m5k72\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") " pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.668055 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdg4z\" (UniqueName: \"kubernetes.io/projected/df9c3114-8e90-4129-9a46-a99986adccd2-kube-api-access-mdg4z\") pod \"certified-operators-m5k72\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") " pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.668646 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-catalog-content\") pod \"certified-operators-m5k72\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") " pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.668659 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-utilities\") pod \"certified-operators-m5k72\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") " pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.694873 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mdg4z\" (UniqueName: \"kubernetes.io/projected/df9c3114-8e90-4129-9a46-a99986adccd2-kube-api-access-mdg4z\") pod \"certified-operators-m5k72\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") " pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:25 crc kubenswrapper[4753]: I1205 17:30:25.719017 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:26 crc kubenswrapper[4753]: I1205 17:30:26.080141 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m5k72"] Dec 05 17:30:26 crc kubenswrapper[4753]: I1205 17:30:26.715321 4753 generic.go:334] "Generic (PLEG): container finished" podID="df9c3114-8e90-4129-9a46-a99986adccd2" containerID="9d1084642e571e7a3618ec0ca85873d1a77efa55b6652003dffe2918cb33dba0" exitCode=0 Dec 05 17:30:26 crc kubenswrapper[4753]: I1205 17:30:26.715759 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5k72" event={"ID":"df9c3114-8e90-4129-9a46-a99986adccd2","Type":"ContainerDied","Data":"9d1084642e571e7a3618ec0ca85873d1a77efa55b6652003dffe2918cb33dba0"} Dec 05 17:30:26 crc kubenswrapper[4753]: I1205 17:30:26.715800 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5k72" event={"ID":"df9c3114-8e90-4129-9a46-a99986adccd2","Type":"ContainerStarted","Data":"7b025a3c89c3c8e0d562ca27498bc316a39a1d6085ecff0e10449ae9c896b4db"} Dec 05 17:30:27 crc kubenswrapper[4753]: I1205 17:30:27.731351 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5k72" event={"ID":"df9c3114-8e90-4129-9a46-a99986adccd2","Type":"ContainerStarted","Data":"ab650f8aef02aef873579bf857d2aa989650268fb9aedf6a7a768955b063961a"} Dec 05 17:30:28 crc kubenswrapper[4753]: I1205 17:30:28.738204 4753 generic.go:334] "Generic (PLEG): container finished" podID="df9c3114-8e90-4129-9a46-a99986adccd2" containerID="ab650f8aef02aef873579bf857d2aa989650268fb9aedf6a7a768955b063961a" exitCode=0 Dec 05 17:30:28 crc kubenswrapper[4753]: I1205 17:30:28.738270 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5k72" event={"ID":"df9c3114-8e90-4129-9a46-a99986adccd2","Type":"ContainerDied","Data":"ab650f8aef02aef873579bf857d2aa989650268fb9aedf6a7a768955b063961a"} Dec 05 17:30:29 crc kubenswrapper[4753]: I1205 17:30:29.748093 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5k72" event={"ID":"df9c3114-8e90-4129-9a46-a99986adccd2","Type":"ContainerStarted","Data":"c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407"} Dec 05 17:30:29 crc kubenswrapper[4753]: I1205 17:30:29.774667 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m5k72" podStartSLOduration=2.271946747 podStartE2EDuration="4.774651172s" podCreationTimestamp="2025-12-05 17:30:25 +0000 UTC" firstStartedPulling="2025-12-05 17:30:26.717403429 +0000 UTC m=+1565.220510435" lastFinishedPulling="2025-12-05 17:30:29.220107854 +0000 UTC m=+1567.723214860" observedRunningTime="2025-12-05 17:30:29.773748867 +0000 UTC m=+1568.276855873" watchObservedRunningTime="2025-12-05 17:30:29.774651172 +0000 UTC m=+1568.277758178" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.643678 4753 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.645570 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.648270 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.654427 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-g9sj5" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.658737 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.663839 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-9wm7h" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.665719 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.673708 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.683468 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.686951 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-hqjsc" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.693956 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.712838 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.746202 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.747536 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.751042 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-mqsfh" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.769043 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.786226 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.787640 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.794613 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-822b9" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.806420 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48xtz\" (UniqueName: \"kubernetes.io/projected/6afd28dd-749e-409b-93ec-30cd85573a95-kube-api-access-48xtz\") pod \"cinder-operator-controller-manager-859b6ccc6-pkzrf\" (UID: \"6afd28dd-749e-409b-93ec-30cd85573a95\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.806502 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m58lg\" (UniqueName: \"kubernetes.io/projected/a5f4bc41-be86-43bd-b9af-d8d8cfac644e-kube-api-access-m58lg\") pod \"designate-operator-controller-manager-78b4bc895b-hhh6x\" (UID: \"a5f4bc41-be86-43bd-b9af-d8d8cfac644e\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.806650 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxgzt\" (UniqueName: \"kubernetes.io/projected/304f4f1a-f42b-4904-9a77-9e26600eb591-kube-api-access-dxgzt\") pod \"barbican-operator-controller-manager-7d9dfd778-k9647\" (UID: \"304f4f1a-f42b-4904-9a77-9e26600eb591\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.818388 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.874011 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.885589 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.894132 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-w92xv" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.900431 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.911251 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.913014 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m58lg\" (UniqueName: \"kubernetes.io/projected/a5f4bc41-be86-43bd-b9af-d8d8cfac644e-kube-api-access-m58lg\") pod \"designate-operator-controller-manager-78b4bc895b-hhh6x\" (UID: \"a5f4bc41-be86-43bd-b9af-d8d8cfac644e\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.913502 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppf8r\" (UniqueName: \"kubernetes.io/projected/e485d825-a020-45b2-a642-bba12e1a5112-kube-api-access-ppf8r\") pod \"glance-operator-controller-manager-77987cd8cd-g7c45\" (UID: \"e485d825-a020-45b2-a642-bba12e1a5112\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.913699 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxgzt\" (UniqueName: \"kubernetes.io/projected/304f4f1a-f42b-4904-9a77-9e26600eb591-kube-api-access-dxgzt\") pod \"barbican-operator-controller-manager-7d9dfd778-k9647\" (UID: \"304f4f1a-f42b-4904-9a77-9e26600eb591\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.923093 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b99bh\" (UniqueName: \"kubernetes.io/projected/9472f12d-6c74-422c-8bc9-76a2ca161b77-kube-api-access-b99bh\") pod \"heat-operator-controller-manager-5f64f6f8bb-n94z9\" (UID: \"9472f12d-6c74-422c-8bc9-76a2ca161b77\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.923218 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48xtz\" (UniqueName: \"kubernetes.io/projected/6afd28dd-749e-409b-93ec-30cd85573a95-kube-api-access-48xtz\") pod \"cinder-operator-controller-manager-859b6ccc6-pkzrf\" (UID: \"6afd28dd-749e-409b-93ec-30cd85573a95\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.934621 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-m2wsk" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.934879 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.944007 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m58lg\" (UniqueName: \"kubernetes.io/projected/a5f4bc41-be86-43bd-b9af-d8d8cfac644e-kube-api-access-m58lg\") pod \"designate-operator-controller-manager-78b4bc895b-hhh6x\" (UID: \"a5f4bc41-be86-43bd-b9af-d8d8cfac644e\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.951750 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.958258 4753 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.981616 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh"] Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.983070 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" Dec 05 17:30:33 crc kubenswrapper[4753]: I1205 17:30:33.999497 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-bbjj6" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.003728 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.003810 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxgzt\" (UniqueName: \"kubernetes.io/projected/304f4f1a-f42b-4904-9a77-9e26600eb591-kube-api-access-dxgzt\") pod \"barbican-operator-controller-manager-7d9dfd778-k9647\" (UID: \"304f4f1a-f42b-4904-9a77-9e26600eb591\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.010647 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.013281 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.022944 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48xtz\" (UniqueName: \"kubernetes.io/projected/6afd28dd-749e-409b-93ec-30cd85573a95-kube-api-access-48xtz\") pod \"cinder-operator-controller-manager-859b6ccc6-pkzrf\" (UID: \"6afd28dd-749e-409b-93ec-30cd85573a95\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.030714 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-jsk5m" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.031393 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.032439 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.032601 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppf8r\" (UniqueName: \"kubernetes.io/projected/e485d825-a020-45b2-a642-bba12e1a5112-kube-api-access-ppf8r\") pod \"glance-operator-controller-manager-77987cd8cd-g7c45\" (UID: \"e485d825-a020-45b2-a642-bba12e1a5112\") " 
pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.032778 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b99bh\" (UniqueName: \"kubernetes.io/projected/9472f12d-6c74-422c-8bc9-76a2ca161b77-kube-api-access-b99bh\") pod \"heat-operator-controller-manager-5f64f6f8bb-n94z9\" (UID: \"9472f12d-6c74-422c-8bc9-76a2ca161b77\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.033058 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7jv9\" (UniqueName: \"kubernetes.io/projected/9712e3f3-fe07-4f19-b04f-6736375fd440-kube-api-access-x7jv9\") pod \"horizon-operator-controller-manager-68c6d99b8f-9jsb5\" (UID: \"9712e3f3-fe07-4f19-b04f-6736375fd440\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.033183 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vqfl\" (UniqueName: \"kubernetes.io/projected/e2bab632-3631-4dcf-b337-12982b375999-kube-api-access-8vqfl\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.051926 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.067236 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.080636 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppf8r\" (UniqueName: \"kubernetes.io/projected/e485d825-a020-45b2-a642-bba12e1a5112-kube-api-access-ppf8r\") pod \"glance-operator-controller-manager-77987cd8cd-g7c45\" (UID: \"e485d825-a020-45b2-a642-bba12e1a5112\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.080727 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b99bh\" (UniqueName: \"kubernetes.io/projected/9472f12d-6c74-422c-8bc9-76a2ca161b77-kube-api-access-b99bh\") pod \"heat-operator-controller-manager-5f64f6f8bb-n94z9\" (UID: \"9472f12d-6c74-422c-8bc9-76a2ca161b77\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.084895 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.086285 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.086410 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.098500 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.100447 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.104694 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-59rvr" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.105035 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-x5d2p" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.112650 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.123695 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-jxz2s" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.125953 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.146354 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh6p2\" (UniqueName: \"kubernetes.io/projected/2d850458-6add-4a44-b1c6-7dba1e8993ab-kube-api-access-fh6p2\") pod \"ironic-operator-controller-manager-6c548fd776-kj4zh\" (UID: \"2d850458-6add-4a44-b1c6-7dba1e8993ab\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.146836 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.146958 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bb7qr\" (UniqueName: \"kubernetes.io/projected/a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e-kube-api-access-bb7qr\") pod \"keystone-operator-controller-manager-7765d96ddf-s6759\" (UID: \"a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.147105 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7jv9\" (UniqueName: \"kubernetes.io/projected/9712e3f3-fe07-4f19-b04f-6736375fd440-kube-api-access-x7jv9\") pod \"horizon-operator-controller-manager-68c6d99b8f-9jsb5\" (UID: \"9712e3f3-fe07-4f19-b04f-6736375fd440\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.147235 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-8vqfl\" (UniqueName: \"kubernetes.io/projected/e2bab632-3631-4dcf-b337-12982b375999-kube-api-access-8vqfl\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:30:34 crc kubenswrapper[4753]: E1205 17:30:34.147830 4753 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 17:30:34 crc kubenswrapper[4753]: E1205 17:30:34.147980 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert podName:e2bab632-3631-4dcf-b337-12982b375999 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:34.647956955 +0000 UTC m=+1573.151063961 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert") pod "infra-operator-controller-manager-57548d458d-j6jtq" (UID: "e2bab632-3631-4dcf-b337-12982b375999") : secret "infra-operator-webhook-server-cert" not found Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.162610 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.192905 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vqfl\" (UniqueName: \"kubernetes.io/projected/e2bab632-3631-4dcf-b337-12982b375999-kube-api-access-8vqfl\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.195245 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.202604 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7jv9\" (UniqueName: \"kubernetes.io/projected/9712e3f3-fe07-4f19-b04f-6736375fd440-kube-api-access-x7jv9\") pod \"horizon-operator-controller-manager-68c6d99b8f-9jsb5\" (UID: \"9712e3f3-fe07-4f19-b04f-6736375fd440\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.211243 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-hf596"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.212978 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.224598 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-tt8b8" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.234906 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.250385 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf59x\" (UniqueName: \"kubernetes.io/projected/85fd7687-c296-460a-a2b2-3da36c97efe6-kube-api-access-pf59x\") pod \"mariadb-operator-controller-manager-56bbcc9d85-s2rgf\" (UID: \"85fd7687-c296-460a-a2b2-3da36c97efe6\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.250476 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bb7qr\" (UniqueName: \"kubernetes.io/projected/a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e-kube-api-access-bb7qr\") pod \"keystone-operator-controller-manager-7765d96ddf-s6759\" (UID: \"a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.250556 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4shl6\" (UniqueName: \"kubernetes.io/projected/0d335585-bcd8-4ddf-a693-421d6d3bf6d2-kube-api-access-4shl6\") pod \"manila-operator-controller-manager-7c79b5df47-qpd7t\" (UID: \"0d335585-bcd8-4ddf-a693-421d6d3bf6d2\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.250580 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2979h\" (UniqueName: \"kubernetes.io/projected/09be61f0-7174-4035-a8f9-315ca512dea4-kube-api-access-2979h\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-vcc5x\" (UID: \"09be61f0-7174-4035-a8f9-315ca512dea4\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.250614 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh6p2\" (UniqueName: \"kubernetes.io/projected/2d850458-6add-4a44-b1c6-7dba1e8993ab-kube-api-access-fh6p2\") pod \"ironic-operator-controller-manager-6c548fd776-kj4zh\" (UID: \"2d850458-6add-4a44-b1c6-7dba1e8993ab\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.260238 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-72b7n"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.261737 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.265521 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.268679 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-w948c" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.288067 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh6p2\" (UniqueName: \"kubernetes.io/projected/2d850458-6add-4a44-b1c6-7dba1e8993ab-kube-api-access-fh6p2\") pod \"ironic-operator-controller-manager-6c548fd776-kj4zh\" (UID: \"2d850458-6add-4a44-b1c6-7dba1e8993ab\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.289999 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bb7qr\" (UniqueName: \"kubernetes.io/projected/a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e-kube-api-access-bb7qr\") pod \"keystone-operator-controller-manager-7765d96ddf-s6759\" (UID: \"a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.298654 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.299221 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-hf596"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.334598 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-72b7n"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.334748 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.337412 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.364259 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thknm\" (UniqueName: \"kubernetes.io/projected/2f396259-4eaa-465d-9674-9999d750b1f6-kube-api-access-thknm\") pod \"octavia-operator-controller-manager-998648c74-72b7n\" (UID: \"2f396259-4eaa-465d-9674-9999d750b1f6\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.365007 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4pr7\" (UniqueName: \"kubernetes.io/projected/a4590a35-52c3-45a7-ba18-81d2db73c384-kube-api-access-s4pr7\") pod \"nova-operator-controller-manager-697bc559fc-hf596\" (UID: \"a4590a35-52c3-45a7-ba18-81d2db73c384\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.365043 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4shl6\" (UniqueName: \"kubernetes.io/projected/0d335585-bcd8-4ddf-a693-421d6d3bf6d2-kube-api-access-4shl6\") pod \"manila-operator-controller-manager-7c79b5df47-qpd7t\" (UID: \"0d335585-bcd8-4ddf-a693-421d6d3bf6d2\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.365074 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2979h\" (UniqueName: \"kubernetes.io/projected/09be61f0-7174-4035-a8f9-315ca512dea4-kube-api-access-2979h\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-vcc5x\" (UID: \"09be61f0-7174-4035-a8f9-315ca512dea4\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.365123 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf59x\" (UniqueName: \"kubernetes.io/projected/85fd7687-c296-460a-a2b2-3da36c97efe6-kube-api-access-pf59x\") pod \"mariadb-operator-controller-manager-56bbcc9d85-s2rgf\" (UID: \"85fd7687-c296-460a-a2b2-3da36c97efe6\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.365731 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.372719 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.374214 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-qztwz" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.381308 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.382725 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.403756 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-n8mq6" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.404054 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.408075 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.408234 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.415141 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.421138 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.415521 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-l4q5x" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.432553 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.442230 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-pxxw8" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.458665 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf59x\" (UniqueName: \"kubernetes.io/projected/85fd7687-c296-460a-a2b2-3da36c97efe6-kube-api-access-pf59x\") pod \"mariadb-operator-controller-manager-56bbcc9d85-s2rgf\" (UID: \"85fd7687-c296-460a-a2b2-3da36c97efe6\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.471938 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4pr7\" (UniqueName: \"kubernetes.io/projected/a4590a35-52c3-45a7-ba18-81d2db73c384-kube-api-access-s4pr7\") pod \"nova-operator-controller-manager-697bc559fc-hf596\" (UID: \"a4590a35-52c3-45a7-ba18-81d2db73c384\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.472141 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thknm\" (UniqueName: \"kubernetes.io/projected/2f396259-4eaa-465d-9674-9999d750b1f6-kube-api-access-thknm\") pod \"octavia-operator-controller-manager-998648c74-72b7n\" (UID: \"2f396259-4eaa-465d-9674-9999d750b1f6\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.472228 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-trc5l\" (UniqueName: 
\"kubernetes.io/projected/8e53ed9e-05c6-4a84-894d-85f427a53f72-kube-api-access-trc5l\") pod \"ovn-operator-controller-manager-b6456fdb6-dhjvc\" (UID: \"8e53ed9e-05c6-4a84-894d-85f427a53f72\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.474802 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.475837 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.479073 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4shl6\" (UniqueName: \"kubernetes.io/projected/0d335585-bcd8-4ddf-a693-421d6d3bf6d2-kube-api-access-4shl6\") pod \"manila-operator-controller-manager-7c79b5df47-qpd7t\" (UID: \"0d335585-bcd8-4ddf-a693-421d6d3bf6d2\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.479257 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2979h\" (UniqueName: \"kubernetes.io/projected/09be61f0-7174-4035-a8f9-315ca512dea4-kube-api-access-2979h\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-vcc5x\" (UID: \"09be61f0-7174-4035-a8f9-315ca512dea4\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.492693 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.534251 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.553651 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.567813 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-hrflj" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.570579 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.574266 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4pr7\" (UniqueName: \"kubernetes.io/projected/a4590a35-52c3-45a7-ba18-81d2db73c384-kube-api-access-s4pr7\") pod \"nova-operator-controller-manager-697bc559fc-hf596\" (UID: \"a4590a35-52c3-45a7-ba18-81d2db73c384\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.574308 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-trc5l\" (UniqueName: \"kubernetes.io/projected/8e53ed9e-05c6-4a84-894d-85f427a53f72-kube-api-access-trc5l\") pod \"ovn-operator-controller-manager-b6456fdb6-dhjvc\" (UID: \"8e53ed9e-05c6-4a84-894d-85f427a53f72\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.574732 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2w4f\" (UniqueName: \"kubernetes.io/projected/9e8cbdbf-2604-4e04-a56a-6f2175c09abe-kube-api-access-n2w4f\") pod \"swift-operator-controller-manager-5f8c65bbfc-8fdbn\" (UID: \"9e8cbdbf-2604-4e04-a56a-6f2175c09abe\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.574872 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vvzf\" (UniqueName: \"kubernetes.io/projected/ebc7e3cf-3701-42f9-a6ca-43f11424a0b3-kube-api-access-6vvzf\") pod \"placement-operator-controller-manager-78f8948974-l5jmt\" (UID: \"ebc7e3cf-3701-42f9-a6ca-43f11424a0b3\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.575471 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9btj\" (UniqueName: \"kubernetes.io/projected/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-kube-api-access-j9btj\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.575599 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.598249 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thknm\" (UniqueName: \"kubernetes.io/projected/2f396259-4eaa-465d-9674-9999d750b1f6-kube-api-access-thknm\") pod \"octavia-operator-controller-manager-998648c74-72b7n\" (UID: \"2f396259-4eaa-465d-9674-9999d750b1f6\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.611870 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.629939 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-trc5l\" (UniqueName: \"kubernetes.io/projected/8e53ed9e-05c6-4a84-894d-85f427a53f72-kube-api-access-trc5l\") pod \"ovn-operator-controller-manager-b6456fdb6-dhjvc\" (UID: \"8e53ed9e-05c6-4a84-894d-85f427a53f72\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.636424 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.643187 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.660344 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.673466 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.678178 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5jg9\" (UniqueName: \"kubernetes.io/projected/444bb95c-a503-40a1-a99e-64d04b3c8930-kube-api-access-m5jg9\") pod \"test-operator-controller-manager-5854674fcc-p5j4k\" (UID: \"444bb95c-a503-40a1-a99e-64d04b3c8930\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.678287 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9btj\" (UniqueName: \"kubernetes.io/projected/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-kube-api-access-j9btj\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.678333 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.678400 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.678459 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2w4f\" (UniqueName: \"kubernetes.io/projected/9e8cbdbf-2604-4e04-a56a-6f2175c09abe-kube-api-access-n2w4f\") pod \"swift-operator-controller-manager-5f8c65bbfc-8fdbn\" (UID: 
\"9e8cbdbf-2604-4e04-a56a-6f2175c09abe\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.678517 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vvzf\" (UniqueName: \"kubernetes.io/projected/ebc7e3cf-3701-42f9-a6ca-43f11424a0b3-kube-api-access-6vvzf\") pod \"placement-operator-controller-manager-78f8948974-l5jmt\" (UID: \"ebc7e3cf-3701-42f9-a6ca-43f11424a0b3\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" Dec 05 17:30:34 crc kubenswrapper[4753]: E1205 17:30:34.679410 4753 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:30:34 crc kubenswrapper[4753]: E1205 17:30:34.679485 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert podName:269f75ec-a232-47f3-8cc8-e9e4c8e9717d nodeName:}" failed. No retries permitted until 2025-12-05 17:30:35.179455064 +0000 UTC m=+1573.682562070 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" (UID: "269f75ec-a232-47f3-8cc8-e9e4c8e9717d") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:30:34 crc kubenswrapper[4753]: E1205 17:30:34.679631 4753 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 17:30:34 crc kubenswrapper[4753]: E1205 17:30:34.679728 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert podName:e2bab632-3631-4dcf-b337-12982b375999 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:35.679693201 +0000 UTC m=+1574.182800207 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert") pod "infra-operator-controller-manager-57548d458d-j6jtq" (UID: "e2bab632-3631-4dcf-b337-12982b375999") : secret "infra-operator-webhook-server-cert" not found Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.693430 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.694711 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.704635 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-6vhsf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.724462 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.726051 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2w4f\" (UniqueName: \"kubernetes.io/projected/9e8cbdbf-2604-4e04-a56a-6f2175c09abe-kube-api-access-n2w4f\") pod \"swift-operator-controller-manager-5f8c65bbfc-8fdbn\" (UID: \"9e8cbdbf-2604-4e04-a56a-6f2175c09abe\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.726692 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9btj\" (UniqueName: \"kubernetes.io/projected/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-kube-api-access-j9btj\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.726878 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.730787 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vvzf\" (UniqueName: \"kubernetes.io/projected/ebc7e3cf-3701-42f9-a6ca-43f11424a0b3-kube-api-access-6vvzf\") pod \"placement-operator-controller-manager-78f8948974-l5jmt\" (UID: \"ebc7e3cf-3701-42f9-a6ca-43f11424a0b3\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.773782 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.776242 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.777940 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.778740 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.779938 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtz5q\" (UniqueName: \"kubernetes.io/projected/41186805-7b90-44a5-b6d6-fe4b6b4d9a79-kube-api-access-jtz5q\") pod \"telemetry-operator-controller-manager-75c997498-r5zvk\" (UID: \"41186805-7b90-44a5-b6d6-fe4b6b4d9a79\") " pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.780055 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5jg9\" (UniqueName: \"kubernetes.io/projected/444bb95c-a503-40a1-a99e-64d04b3c8930-kube-api-access-m5jg9\") pod \"test-operator-controller-manager-5854674fcc-p5j4k\" (UID: \"444bb95c-a503-40a1-a99e-64d04b3c8930\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.781494 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-dvc7f" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.817478 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.819499 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5jg9\" (UniqueName: \"kubernetes.io/projected/444bb95c-a503-40a1-a99e-64d04b3c8930-kube-api-access-m5jg9\") pod \"test-operator-controller-manager-5854674fcc-p5j4k\" (UID: \"444bb95c-a503-40a1-a99e-64d04b3c8930\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.871658 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.872910 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.877005 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.877340 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.877524 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-hdjgw" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.882125 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtz5q\" (UniqueName: \"kubernetes.io/projected/41186805-7b90-44a5-b6d6-fe4b6b4d9a79-kube-api-access-jtz5q\") pod \"telemetry-operator-controller-manager-75c997498-r5zvk\" (UID: \"41186805-7b90-44a5-b6d6-fe4b6b4d9a79\") " pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.882181 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5v44\" (UniqueName: \"kubernetes.io/projected/7b847f69-4008-4acf-bddf-e0ea5a07b6bd-kube-api-access-b5v44\") pod \"watcher-operator-controller-manager-769dc69bc-7wzfq\" (UID: \"7b847f69-4008-4acf-bddf-e0ea5a07b6bd\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.912692 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.914980 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.920306 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.949584 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtz5q\" (UniqueName: \"kubernetes.io/projected/41186805-7b90-44a5-b6d6-fe4b6b4d9a79-kube-api-access-jtz5q\") pod \"telemetry-operator-controller-manager-75c997498-r5zvk\" (UID: \"41186805-7b90-44a5-b6d6-fe4b6b4d9a79\") " pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.984568 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.984732 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5v44\" (UniqueName: \"kubernetes.io/projected/7b847f69-4008-4acf-bddf-e0ea5a07b6bd-kube-api-access-b5v44\") pod \"watcher-operator-controller-manager-769dc69bc-7wzfq\" (UID: \"7b847f69-4008-4acf-bddf-e0ea5a07b6bd\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.984757 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xpcb\" (UniqueName: \"kubernetes.io/projected/ac0841fe-3cba-4397-bfec-67a9cbec6861-kube-api-access-4xpcb\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.984815 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.987431 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6"] Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.989739 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" Dec 05 17:30:34 crc kubenswrapper[4753]: I1205 17:30:34.997577 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-sh5nq" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.008848 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5v44\" (UniqueName: \"kubernetes.io/projected/7b847f69-4008-4acf-bddf-e0ea5a07b6bd-kube-api-access-b5v44\") pod \"watcher-operator-controller-manager-769dc69bc-7wzfq\" (UID: \"7b847f69-4008-4acf-bddf-e0ea5a07b6bd\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.022457 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6"] Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.086475 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xpcb\" (UniqueName: \"kubernetes.io/projected/ac0841fe-3cba-4397-bfec-67a9cbec6861-kube-api-access-4xpcb\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.086543 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.086637 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.086739 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzsdf\" (UniqueName: \"kubernetes.io/projected/cca913d2-4dbb-4cd8-8575-4af52cc95501-kube-api-access-rzsdf\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zl7c6\" (UID: \"cca913d2-4dbb-4cd8-8575-4af52cc95501\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.087066 4753 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.087219 4753 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.087234 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:35.587205031 +0000 UTC m=+1574.090312037 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "metrics-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.087288 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:35.587271683 +0000 UTC m=+1574.090378689 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "webhook-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.089657 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.118093 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xpcb\" (UniqueName: \"kubernetes.io/projected/ac0841fe-3cba-4397-bfec-67a9cbec6861-kube-api-access-4xpcb\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.166376 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.193601 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.195430 4753 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.206608 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzsdf\" (UniqueName: \"kubernetes.io/projected/cca913d2-4dbb-4cd8-8575-4af52cc95501-kube-api-access-rzsdf\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zl7c6\" (UID: \"cca913d2-4dbb-4cd8-8575-4af52cc95501\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.206670 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert podName:269f75ec-a232-47f3-8cc8-e9e4c8e9717d nodeName:}" failed. No retries permitted until 2025-12-05 17:30:36.20659383 +0000 UTC m=+1574.709700956 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" (UID: "269f75ec-a232-47f3-8cc8-e9e4c8e9717d") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.258263 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzsdf\" (UniqueName: \"kubernetes.io/projected/cca913d2-4dbb-4cd8-8575-4af52cc95501-kube-api-access-rzsdf\") pod \"rabbitmq-cluster-operator-manager-668c99d594-zl7c6\" (UID: \"cca913d2-4dbb-4cd8-8575-4af52cc95501\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.271062 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5"] Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.296413 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9"] Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.306481 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x"] Dec 05 17:30:35 crc kubenswrapper[4753]: W1205 17:30:35.326176 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9712e3f3_fe07_4f19_b04f_6736375fd440.slice/crio-8810c0e91c512e01357adfb82ead88c1004741d359689cc7523dbc3bfd1041a6 WatchSource:0}: Error finding container 8810c0e91c512e01357adfb82ead88c1004741d359689cc7523dbc3bfd1041a6: Status 404 returned error can't find the container with id 8810c0e91c512e01357adfb82ead88c1004741d359689cc7523dbc3bfd1041a6 Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.493451 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.627732 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.628276 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.628883 4753 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.629405 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. 
No retries permitted until 2025-12-05 17:30:36.629381521 +0000 UTC m=+1575.132488527 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "webhook-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.629463 4753 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.629482 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:36.629477074 +0000 UTC m=+1575.132584080 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "metrics-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.730191 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.730471 4753 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: E1205 17:30:35.730519 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert podName:e2bab632-3631-4dcf-b337-12982b375999 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:37.730503825 +0000 UTC m=+1576.233610831 (durationBeforeRetry 2s). 
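
Each retry above re-reads the Secret from the API ("Couldn't get secret ... not found"), so these pods stay wedged at volume setup until webhook-server-cert, metrics-server-cert, and infra-operator-webhook-server-cert are created out of band (by whatever issues the operators' certificates). A hedged client-go sketch for watching one of them appear from outside the node; the kubeconfig path and polling approach are assumptions, the namespace and secret name are taken from the entries above:

    package main

    import (
        "context"
        "fmt"
        "time"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Load ~/.kube/config; inside a pod you would use rest.InClusterConfig().
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        ns, name := "openstack-operators", "webhook-server-cert"
        for {
            if _, err := cs.CoreV1().Secrets(ns).Get(context.TODO(), name, metav1.GetOptions{}); err == nil {
                fmt.Println("secret exists; the kubelet will mount it on its next retry")
                return
            }
            fmt.Printf("secret %s/%s not there yet; checking again\n", ns, name)
            time.Sleep(2 * time.Second)
        }
    }
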
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert") pod "infra-operator-controller-manager-57548d458d-j6jtq" (UID: "e2bab632-3631-4dcf-b337-12982b375999") : secret "infra-operator-webhook-server-cert" not found Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.738110 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.738141 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.747058 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf"] Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.757558 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647"] Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.769475 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh"] Dec 05 17:30:35 crc kubenswrapper[4753]: W1205 17:30:35.782303 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d850458_6add_4a44_b1c6_7dba1e8993ab.slice/crio-86267e300f43fcc25521ad9b802a118070c14e97179ffd5fc1fb4a016380e67c WatchSource:0}: Error finding container 86267e300f43fcc25521ad9b802a118070c14e97179ffd5fc1fb4a016380e67c: Status 404 returned error can't find the container with id 86267e300f43fcc25521ad9b802a118070c14e97179ffd5fc1fb4a016380e67c Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.814077 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.827596 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" event={"ID":"a5f4bc41-be86-43bd-b9af-d8d8cfac644e","Type":"ContainerStarted","Data":"159c3bbaabc08cb7a087b193ec83edeebbb16e6f1dd4a19e02189a5d368a48a4"} Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.833666 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" event={"ID":"9472f12d-6c74-422c-8bc9-76a2ca161b77","Type":"ContainerStarted","Data":"d87cbfc1e16cf80be75b9f30655e451e2ab7799686c5b777c01e9c9fd8077136"} Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.835394 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" event={"ID":"304f4f1a-f42b-4904-9a77-9e26600eb591","Type":"ContainerStarted","Data":"799b5fa5f418c0326d70cfbd666f34609060fc2159bc16772c9a364b3e3b3efe"} Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.836249 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" event={"ID":"2d850458-6add-4a44-b1c6-7dba1e8993ab","Type":"ContainerStarted","Data":"86267e300f43fcc25521ad9b802a118070c14e97179ffd5fc1fb4a016380e67c"} Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.841684 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" event={"ID":"9712e3f3-fe07-4f19-b04f-6736375fd440","Type":"ContainerStarted","Data":"8810c0e91c512e01357adfb82ead88c1004741d359689cc7523dbc3bfd1041a6"} Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.845354 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" event={"ID":"6afd28dd-749e-409b-93ec-30cd85573a95","Type":"ContainerStarted","Data":"55b0ebf160ca097cebf96c597501910271761c7aca99f308194d81d91c5c44e9"} Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.849387 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759"] Dec 05 17:30:35 crc kubenswrapper[4753]: W1205 17:30:35.881718 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85fd7687_c296_460a_a2b2_3da36c97efe6.slice/crio-74f37256d556641f15971ec21d48a9f166ee4333d034422a11d44831064efeb1 WatchSource:0}: Error finding container 74f37256d556641f15971ec21d48a9f166ee4333d034422a11d44831064efeb1: Status 404 returned error can't find the container with id 74f37256d556641f15971ec21d48a9f166ee4333d034422a11d44831064efeb1 Dec 05 17:30:35 crc kubenswrapper[4753]: W1205 17:30:35.884581 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09be61f0_7174_4035_a8f9_315ca512dea4.slice/crio-ff4d5ced68fdfc24de1424df75e18e68d2f5cba548a85317308bed59e3c91512 WatchSource:0}: Error finding container ff4d5ced68fdfc24de1424df75e18e68d2f5cba548a85317308bed59e3c91512: Status 404 returned error can't find the container with id ff4d5ced68fdfc24de1424df75e18e68d2f5cba548a85317308bed59e3c91512 Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.885774 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x"] Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.892581 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m5k72" Dec 05 17:30:35 crc kubenswrapper[4753]: I1205 17:30:35.897330 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf"] Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.053105 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt"] Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.064064 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-72b7n"] Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.081946 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc"] Dec 05 17:30:36 crc kubenswrapper[4753]: W1205 17:30:36.087037 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f396259_4eaa_465d_9674_9999d750b1f6.slice/crio-7d72d04d3ebde8347cf7184b2d871474082e9f758a2c90ea67d90059bf375a5f WatchSource:0}: Error finding container 7d72d04d3ebde8347cf7184b2d871474082e9f758a2c90ea67d90059bf375a5f: Status 404 returned error can't find the container with id 
7d72d04d3ebde8347cf7184b2d871474082e9f758a2c90ea67d90059bf375a5f Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.094090 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk"] Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.098667 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn"] Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.112222 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.70:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jtz5q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-75c997498-r5zvk_openstack-operators(41186805-7b90-44a5-b6d6-fe4b6b4d9a79): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.112343 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-s4pr7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-hf596_openstack-operators(a4590a35-52c3-45a7-ba18-81d2db73c384): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.112428 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ppf8r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-g7c45_openstack-operators(e485d825-a020-45b2-a642-bba12e1a5112): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.115374 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-hf596"] Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.116910 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ppf8r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-77987cd8cd-g7c45_openstack-operators(e485d825-a020-45b2-a642-bba12e1a5112): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.117028 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-s4pr7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-hf596_openstack-operators(a4590a35-52c3-45a7-ba18-81d2db73c384): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.117344 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jtz5q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-75c997498-r5zvk_openstack-operators(41186805-7b90-44a5-b6d6-fe4b6b4d9a79): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.118644 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" podUID="41186805-7b90-44a5-b6d6-fe4b6b4d9a79" Dec 05 
17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.118764 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" podUID="a4590a35-52c3-45a7-ba18-81d2db73c384" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.118844 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" podUID="e485d825-a020-45b2-a642-bba12e1a5112" Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.132090 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45"] Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.247964 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.248962 4753 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.249052 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert podName:269f75ec-a232-47f3-8cc8-e9e4c8e9717d nodeName:}" failed. No retries permitted until 2025-12-05 17:30:38.249026797 +0000 UTC m=+1576.752133823 (durationBeforeRetry 2s). 
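
The burst of ErrImagePull: "pull QPS exceeded" failures above is not a registry error: the kubelet rate-limits image pulls with a token bucket (its registryPullQPS/registryBurst settings), and pulls requested after the bucket drains fail immediately rather than queueing. A sketch of that behavior with golang.org/x/time/rate; the 5 QPS / burst 10 figures are the usual kubelet defaults but are assumptions here:

    package main

    import (
        "fmt"

        "golang.org/x/time/rate"
    )

    func main() {
        limiter := rate.NewLimiter(rate.Limit(5), 10) // ~registryPullQPS=5, registryBurst=10
        for i := 1; i <= 15; i++ {
            if limiter.Allow() {
                fmt.Printf("pull %2d: allowed\n", i)
            } else {
                // the kubelet surfaces this case as ErrImagePull: "pull QPS exceeded"
                fmt.Printf("pull %2d: rejected, QPS exceeded\n", i)
            }
        }
        // In a tight loop the first 10 pulls (the burst) pass and the rest fail,
        // which matches many operator pods all starting at once, as in this log.
    }
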
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" (UID: "269f75ec-a232-47f3-8cc8-e9e4c8e9717d") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.279226 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6"] Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.285972 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t"] Dec 05 17:30:36 crc kubenswrapper[4753]: W1205 17:30:36.287788 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d335585_bcd8_4ddf_a693_421d6d3bf6d2.slice/crio-55ac80cd13dbb5916910623994908f906b6bb4523503e410428c9620db80c46d WatchSource:0}: Error finding container 55ac80cd13dbb5916910623994908f906b6bb4523503e410428c9620db80c46d: Status 404 returned error can't find the container with id 55ac80cd13dbb5916910623994908f906b6bb4523503e410428c9620db80c46d Dec 05 17:30:36 crc kubenswrapper[4753]: W1205 17:30:36.298437 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcca913d2_4dbb_4cd8_8575_4af52cc95501.slice/crio-3dd63ed1aab22ad9918312512cf81e115a5394d78d3f486224fa2173a1fb12fe WatchSource:0}: Error finding container 3dd63ed1aab22ad9918312512cf81e115a5394d78d3f486224fa2173a1fb12fe: Status 404 returned error can't find the container with id 3dd63ed1aab22ad9918312512cf81e115a5394d78d3f486224fa2173a1fb12fe Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.302229 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rzsdf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-zl7c6_openstack-operators(cca913d2-4dbb-4cd8-8575-4af52cc95501): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.303595 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" podUID="cca913d2-4dbb-4cd8-8575-4af52cc95501" Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.306540 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq"] Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.333425 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k"] Dec 05 17:30:36 crc kubenswrapper[4753]: W1205 17:30:36.340002 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod444bb95c_a503_40a1_a99e_64d04b3c8930.slice/crio-d92f728be28a91455776a209f93c8b30c46595cdacd3915b609d62a5203d8051 WatchSource:0}: Error finding container d92f728be28a91455776a209f93c8b30c46595cdacd3915b609d62a5203d8051: Status 404 returned error can't find the container with id d92f728be28a91455776a209f93c8b30c46595cdacd3915b609d62a5203d8051 Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.346119 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m5jg9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-p5j4k_openstack-operators(444bb95c-a503-40a1-a99e-64d04b3c8930): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.349687 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m5jg9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-p5j4k_openstack-operators(444bb95c-a503-40a1-a99e-64d04b3c8930): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.351116 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS 
exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" podUID="444bb95c-a503-40a1-a99e-64d04b3c8930" Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.655603 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.655724 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.655935 4753 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.656058 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:38.656003132 +0000 UTC m=+1577.159110138 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "webhook-server-cert" not found Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.656325 4753 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.656368 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:38.656356732 +0000 UTC m=+1577.159463738 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "metrics-server-cert" not found Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.864927 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" event={"ID":"0d335585-bcd8-4ddf-a693-421d6d3bf6d2","Type":"ContainerStarted","Data":"55ac80cd13dbb5916910623994908f906b6bb4523503e410428c9620db80c46d"} Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.871360 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" event={"ID":"9e8cbdbf-2604-4e04-a56a-6f2175c09abe","Type":"ContainerStarted","Data":"d813395a5508462d54387e3e125bca13a8c6c63f111aab7762ce09a7fe9691c7"} Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.875403 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" event={"ID":"85fd7687-c296-460a-a2b2-3da36c97efe6","Type":"ContainerStarted","Data":"74f37256d556641f15971ec21d48a9f166ee4333d034422a11d44831064efeb1"} Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.876535 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" event={"ID":"e485d825-a020-45b2-a642-bba12e1a5112","Type":"ContainerStarted","Data":"c038df8c2eafeda2b35bfdc9f8edc9ec232a7277a87368d3389eba2ab6eab548"} Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.878559 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" podUID="e485d825-a020-45b2-a642-bba12e1a5112" Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.879009 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" event={"ID":"ebc7e3cf-3701-42f9-a6ca-43f11424a0b3","Type":"ContainerStarted","Data":"1c8fbde6897591a9327053cd2ef6eede9068b142dda2ff677556fc713bc99871"} Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.880808 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" event={"ID":"8e53ed9e-05c6-4a84-894d-85f427a53f72","Type":"ContainerStarted","Data":"cdbb792a35a7355acd7a6a7ce6c3826f76231e021c4db9c3acf1e59b845d841e"} Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.883867 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" event={"ID":"a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e","Type":"ContainerStarted","Data":"125c62ffc61dfb2ff7eaee7c6f4a6b03f89dcdc9e0b889d57cacee849580e95b"} Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.886359 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" event={"ID":"09be61f0-7174-4035-a8f9-315ca512dea4","Type":"ContainerStarted","Data":"ff4d5ced68fdfc24de1424df75e18e68d2f5cba548a85317308bed59e3c91512"} Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.889103 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" event={"ID":"7b847f69-4008-4acf-bddf-e0ea5a07b6bd","Type":"ContainerStarted","Data":"068d12fc8051ef9de7cab748b79e20904354db4b81707d17c1072717a593f11f"} Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.891113 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" event={"ID":"cca913d2-4dbb-4cd8-8575-4af52cc95501","Type":"ContainerStarted","Data":"3dd63ed1aab22ad9918312512cf81e115a5394d78d3f486224fa2173a1fb12fe"} Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.893121 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" podUID="cca913d2-4dbb-4cd8-8575-4af52cc95501" Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.905332 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" event={"ID":"2f396259-4eaa-465d-9674-9999d750b1f6","Type":"ContainerStarted","Data":"7d72d04d3ebde8347cf7184b2d871474082e9f758a2c90ea67d90059bf375a5f"} Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.907053 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" event={"ID":"444bb95c-a503-40a1-a99e-64d04b3c8930","Type":"ContainerStarted","Data":"d92f728be28a91455776a209f93c8b30c46595cdacd3915b609d62a5203d8051"} Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.909056 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" podUID="444bb95c-a503-40a1-a99e-64d04b3c8930" Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.909205 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" event={"ID":"a4590a35-52c3-45a7-ba18-81d2db73c384","Type":"ContainerStarted","Data":"030161fa1ad183f6cb076eab3edf7d706e04af033eabc407d3af9fe6397769c7"} Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.912445 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" podUID="a4590a35-52c3-45a7-ba18-81d2db73c384" Dec 05 17:30:36 crc kubenswrapper[4753]: I1205 17:30:36.915129 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" event={"ID":"41186805-7b90-44a5-b6d6-fe4b6b4d9a79","Type":"ContainerStarted","Data":"47e45aef0932d27d1e66f305eec8c1f33f03c34e3f8136736e17e7aefde322c9"} Dec 05 17:30:36 crc kubenswrapper[4753]: E1205 17:30:36.919793 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.70:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" podUID="41186805-7b90-44a5-b6d6-fe4b6b4d9a79" Dec 05 17:30:37 crc kubenswrapper[4753]: I1205 17:30:37.785756 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:30:37 crc kubenswrapper[4753]: E1205 17:30:37.785933 4753 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 17:30:37 crc kubenswrapper[4753]: E1205 17:30:37.786688 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert podName:e2bab632-3631-4dcf-b337-12982b375999 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:41.786669268 +0000 UTC m=+1580.289776274 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert") pod "infra-operator-controller-manager-57548d458d-j6jtq" (UID: "e2bab632-3631-4dcf-b337-12982b375999") : secret "infra-operator-webhook-server-cert" not found Dec 05 17:30:37 crc kubenswrapper[4753]: E1205 17:30:37.932566 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" podUID="a4590a35-52c3-45a7-ba18-81d2db73c384" Dec 05 17:30:37 crc kubenswrapper[4753]: E1205 17:30:37.933045 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" podUID="cca913d2-4dbb-4cd8-8575-4af52cc95501" Dec 05 17:30:37 crc kubenswrapper[4753]: E1205 17:30:37.933570 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:abdb733b01e92ac17f565762f30f1d075b44c16421bd06e557f6bb3c319e1809\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" podUID="e485d825-a020-45b2-a642-bba12e1a5112" Dec 05 17:30:37 crc kubenswrapper[4753]: E1205 17:30:37.933837 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.70:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" podUID="41186805-7b90-44a5-b6d6-fe4b6b4d9a79" Dec 05 17:30:37 crc kubenswrapper[4753]: E1205 17:30:37.934000 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" podUID="444bb95c-a503-40a1-a99e-64d04b3c8930" Dec 05 17:30:38 crc kubenswrapper[4753]: I1205 17:30:38.173472 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m5k72"] Dec 05 17:30:38 crc kubenswrapper[4753]: I1205 17:30:38.173761 4753 
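[Annotation] The cert mount above fails only because the webhook certificate Secret does not exist yet; once the Secrets appear (from 17:30:49 onwards below), the same MountVolume.SetUp calls succeed unchanged. For reference, a minimal client-go sketch of the object the kubelet is waiting on. The name, namespace, and kubernetes.io/tls type are taken from the log; the PEM inputs and this creation path are illustrative assumptions, not how this deployment actually provisions its certificates.

package main

import (
	"context"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
)

// createWebhookCertSecret creates the TLS Secret named in the
// "Couldn't get secret" errors above. certPEM and keyPEM are assumed inputs.
func createWebhookCertSecret(ctx context.Context, certPEM, keyPEM []byte) error {
	cfg, err := rest.InClusterConfig()
	if err != nil {
		return err
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		return err
	}
	sec := &corev1.Secret{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "infra-operator-webhook-server-cert",
			Namespace: "openstack-operators",
		},
		Type: corev1.SecretTypeTLS,
		Data: map[string][]byte{
			corev1.TLSCertKey:       certPEM, // tls.crt
			corev1.TLSPrivateKeyKey: keyPEM,  // tls.key
		},
	}
	_, err = cs.CoreV1().Secrets(sec.Namespace).Create(ctx, sec, metav1.CreateOptions{})
	return err
}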
Dec 05 17:30:38 crc kubenswrapper[4753]: I1205 17:30:38.173472 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m5k72"]
Dec 05 17:30:38 crc kubenswrapper[4753]: I1205 17:30:38.173761 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m5k72" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" containerName="registry-server" containerID="cri-o://c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407" gracePeriod=2
Dec 05 17:30:38 crc kubenswrapper[4753]: I1205 17:30:38.300521 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf"
Dec 05 17:30:38 crc kubenswrapper[4753]: E1205 17:30:38.300776 4753 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 17:30:38 crc kubenswrapper[4753]: E1205 17:30:38.300909 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert podName:269f75ec-a232-47f3-8cc8-e9e4c8e9717d nodeName:}" failed. No retries permitted until 2025-12-05 17:30:42.300879599 +0000 UTC m=+1580.803986665 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" (UID: "269f75ec-a232-47f3-8cc8-e9e4c8e9717d") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 17:30:38 crc kubenswrapper[4753]: I1205 17:30:38.707388 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn"
Dec 05 17:30:38 crc kubenswrapper[4753]: I1205 17:30:38.707456 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn"
Dec 05 17:30:38 crc kubenswrapper[4753]: E1205 17:30:38.707631 4753 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 17:30:38 crc kubenswrapper[4753]: E1205 17:30:38.707678 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:42.707664588 +0000 UTC m=+1581.210771594 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "webhook-server-cert" not found
Dec 05 17:30:38 crc kubenswrapper[4753]: E1205 17:30:38.707976 4753 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 17:30:38 crc kubenswrapper[4753]: E1205 17:30:38.708002 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:42.707993878 +0000 UTC m=+1581.211100874 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "metrics-server-cert" not found
Dec 05 17:30:38 crc kubenswrapper[4753]: I1205 17:30:38.940317 4753 generic.go:334] "Generic (PLEG): container finished" podID="df9c3114-8e90-4129-9a46-a99986adccd2" containerID="c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407" exitCode=0
Dec 05 17:30:38 crc kubenswrapper[4753]: I1205 17:30:38.940363 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5k72" event={"ID":"df9c3114-8e90-4129-9a46-a99986adccd2","Type":"ContainerDied","Data":"c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407"}
Dec 05 17:30:41 crc kubenswrapper[4753]: I1205 17:30:41.881180 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq"
Dec 05 17:30:41 crc kubenswrapper[4753]: E1205 17:30:41.881377 4753 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 17:30:41 crc kubenswrapper[4753]: E1205 17:30:41.881843 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert podName:e2bab632-3631-4dcf-b337-12982b375999 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:49.881819281 +0000 UTC m=+1588.384926297 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert") pod "infra-operator-controller-manager-57548d458d-j6jtq" (UID: "e2bab632-3631-4dcf-b337-12982b375999") : secret "infra-operator-webhook-server-cert" not found
Dec 05 17:30:42 crc kubenswrapper[4753]: I1205 17:30:42.402113 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf"
Dec 05 17:30:42 crc kubenswrapper[4753]: E1205 17:30:42.402512 4753 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 17:30:42 crc kubenswrapper[4753]: E1205 17:30:42.402569 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert podName:269f75ec-a232-47f3-8cc8-e9e4c8e9717d nodeName:}" failed. No retries permitted until 2025-12-05 17:30:50.402552386 +0000 UTC m=+1588.905659402 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" (UID: "269f75ec-a232-47f3-8cc8-e9e4c8e9717d") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 17:30:42 crc kubenswrapper[4753]: I1205 17:30:42.710252 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn"
Dec 05 17:30:42 crc kubenswrapper[4753]: I1205 17:30:42.710356 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn"
Dec 05 17:30:42 crc kubenswrapper[4753]: E1205 17:30:42.710424 4753 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 17:30:42 crc kubenswrapper[4753]: E1205 17:30:42.710487 4753 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 17:30:42 crc kubenswrapper[4753]: E1205 17:30:42.710497 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:50.710479306 +0000 UTC m=+1589.213586312 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "metrics-server-cert" not found
Dec 05 17:30:42 crc kubenswrapper[4753]: E1205 17:30:42.710524 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:30:50.710514047 +0000 UTC m=+1589.213621043 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "webhook-server-cert" not found
Dec 05 17:30:45 crc kubenswrapper[4753]: E1205 17:30:45.729287 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407 is running failed: container process not found" containerID="c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 17:30:45 crc kubenswrapper[4753]: E1205 17:30:45.730121 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407 is running failed: container process not found" containerID="c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 17:30:45 crc kubenswrapper[4753]: E1205 17:30:45.730524 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407 is running failed: container process not found" containerID="c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407" cmd=["grpc_health_probe","-addr=:50051"]
Dec 05 17:30:45 crc kubenswrapper[4753]: E1205 17:30:45.730556 4753 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-m5k72" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" containerName="registry-server"
Dec 05 17:30:48 crc kubenswrapper[4753]: E1205 17:30:48.458355 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea"
Dec 05 17:30:48 crc kubenswrapper[4753]: E1205 17:30:48.459679 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dxgzt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-k9647_openstack-operators(304f4f1a-f42b-4904-9a77-9e26600eb591): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 17:30:49 crc kubenswrapper[4753]: I1205 17:30:49.962374 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq"
Dec 05 17:30:49 crc kubenswrapper[4753]: I1205 17:30:49.973619 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e2bab632-3631-4dcf-b337-12982b375999-cert\") pod \"infra-operator-controller-manager-57548d458d-j6jtq\" (UID: \"e2bab632-3631-4dcf-b337-12982b375999\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq"
Dec 05 17:30:50 crc kubenswrapper[4753]: I1205 17:30:50.144787 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq"
Dec 05 17:30:50 crc kubenswrapper[4753]: I1205 17:30:50.473242 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf"
Dec 05 17:30:50 crc kubenswrapper[4753]: I1205 17:30:50.480443 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/269f75ec-a232-47f3-8cc8-e9e4c8e9717d-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd462vzf\" (UID: \"269f75ec-a232-47f3-8cc8-e9e4c8e9717d\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf"
Dec 05 17:30:50 crc kubenswrapper[4753]: I1205 17:30:50.496558 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf"
Dec 05 17:30:50 crc kubenswrapper[4753]: I1205 17:30:50.779199 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn"
Dec 05 17:30:50 crc kubenswrapper[4753]: I1205 17:30:50.779363 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn"
Dec 05 17:30:50 crc kubenswrapper[4753]: E1205 17:30:50.779657 4753 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 17:30:50 crc kubenswrapper[4753]: E1205 17:30:50.779768 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:31:06.779732115 +0000 UTC m=+1605.282839161 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "webhook-server-cert" not found
Dec 05 17:30:50 crc kubenswrapper[4753]: E1205 17:30:50.780219 4753 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 17:30:50 crc kubenswrapper[4753]: E1205 17:30:50.780333 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs podName:ac0841fe-3cba-4397-bfec-67a9cbec6861 nodeName:}" failed. No retries permitted until 2025-12-05 17:31:06.780305392 +0000 UTC m=+1605.283412478 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs") pod "openstack-operator-controller-manager-6dd6c4f769-lq2pn" (UID: "ac0841fe-3cba-4397-bfec-67a9cbec6861") : secret "metrics-server-cert" not found
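[Annotation] Note the retry schedule across these mount failures: the same volumes were first deferred for 4s (17:30:38), then 8s (17:30:42), and now 16s (17:30:50). That is a plain capped-doubling backoff. The sketch below reproduces the schedule as read off the log; the 4s starting delay comes from the log, while the cap is an assumption for illustration only, not a value taken from kubelet source:

package main

import (
	"fmt"
	"time"
)

func main() {
	maxDelay := 2 * time.Minute // assumed cap, for illustration only
	delay := 4 * time.Second    // first durationBeforeRetry in the log
	for attempt := 1; attempt <= 5; attempt++ {
		fmt.Printf("attempt %d: no retries permitted for %v\n", attempt, delay)
		delay *= 2 // 4s -> 8s -> 16s, matching the log
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}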
Dec 05 17:30:53 crc kubenswrapper[4753]: E1205 17:30:53.410491 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f"
Dec 05 17:30:53 crc kubenswrapper[4753]: E1205 17:30:53.411103 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6vvzf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-l5jmt_openstack-operators(ebc7e3cf-3701-42f9-a6ca-43f11424a0b3): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 17:30:54 crc kubenswrapper[4753]: E1205 17:30:54.134449 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7"
Dec 05 17:30:54 crc kubenswrapper[4753]: E1205 17:30:54.134973 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bb7qr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-s6759_openstack-operators(a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.166875 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m5k72"
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.333534 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdg4z\" (UniqueName: \"kubernetes.io/projected/df9c3114-8e90-4129-9a46-a99986adccd2-kube-api-access-mdg4z\") pod \"df9c3114-8e90-4129-9a46-a99986adccd2\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") "
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.333677 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-catalog-content\") pod \"df9c3114-8e90-4129-9a46-a99986adccd2\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") "
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.333744 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-utilities\") pod \"df9c3114-8e90-4129-9a46-a99986adccd2\" (UID: \"df9c3114-8e90-4129-9a46-a99986adccd2\") "
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.334578 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-utilities" (OuterVolumeSpecName: "utilities") pod "df9c3114-8e90-4129-9a46-a99986adccd2" (UID: "df9c3114-8e90-4129-9a46-a99986adccd2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.339436 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df9c3114-8e90-4129-9a46-a99986adccd2-kube-api-access-mdg4z" (OuterVolumeSpecName: "kube-api-access-mdg4z") pod "df9c3114-8e90-4129-9a46-a99986adccd2" (UID: "df9c3114-8e90-4129-9a46-a99986adccd2"). InnerVolumeSpecName "kube-api-access-mdg4z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.382669 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df9c3114-8e90-4129-9a46-a99986adccd2" (UID: "df9c3114-8e90-4129-9a46-a99986adccd2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.435272 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.435305 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9c3114-8e90-4129-9a46-a99986adccd2-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:30:54 crc kubenswrapper[4753]: I1205 17:30:54.435314 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdg4z\" (UniqueName: \"kubernetes.io/projected/df9c3114-8e90-4129-9a46-a99986adccd2-kube-api-access-mdg4z\") on node \"crc\" DevicePath \"\""
Dec 05 17:30:55 crc kubenswrapper[4753]: I1205 17:30:55.075373 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m5k72" event={"ID":"df9c3114-8e90-4129-9a46-a99986adccd2","Type":"ContainerDied","Data":"7b025a3c89c3c8e0d562ca27498bc316a39a1d6085ecff0e10449ae9c896b4db"}
Dec 05 17:30:55 crc kubenswrapper[4753]: I1205 17:30:55.075438 4753 scope.go:117] "RemoveContainer" containerID="c689a1cd33fe12943f8b3e11b3e57d99d3933761822e66e9f7c51f6ad917d407"
Dec 05 17:30:55 crc kubenswrapper[4753]: I1205 17:30:55.075523 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m5k72"
Dec 05 17:30:55 crc kubenswrapper[4753]: I1205 17:30:55.131785 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m5k72"]
Dec 05 17:30:55 crc kubenswrapper[4753]: I1205 17:30:55.141853 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m5k72"]
Dec 05 17:30:55 crc kubenswrapper[4753]: I1205 17:30:55.736495 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" path="/var/lib/kubelet/pods/df9c3114-8e90-4129-9a46-a99986adccd2/volumes"
Dec 05 17:31:02 crc kubenswrapper[4753]: I1205 17:31:02.624655 4753 scope.go:117] "RemoveContainer" containerID="ab650f8aef02aef873579bf857d2aa989650268fb9aedf6a7a768955b063961a"
Dec 05 17:31:02 crc kubenswrapper[4753]: I1205 17:31:02.857108 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq"]
Dec 05 17:31:04 crc kubenswrapper[4753]: E1205 17:31:04.293102 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670"
Dec 05 17:31:04 crc kubenswrapper[4753]: E1205 17:31:04.293320 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-s4pr7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-hf596_openstack-operators(a4590a35-52c3-45a7-ba18-81d2db73c384): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 17:31:05 crc kubenswrapper[4753]: E1205 17:31:05.358911 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.70:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199"
Dec 05 17:31:05 crc kubenswrapper[4753]: E1205 17:31:05.359333 4753 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="38.102.83.70:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199"
Dec 05 17:31:05 crc kubenswrapper[4753]: E1205 17:31:05.359537 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:38.102.83.70:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jtz5q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-75c997498-r5zvk_openstack-operators(41186805-7b90-44a5-b6d6-fe4b6b4d9a79): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 17:31:05 crc kubenswrapper[4753]: I1205 17:31:05.589628 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf"]
Dec 05 17:31:05 crc kubenswrapper[4753]: I1205 17:31:05.714003 4753 scope.go:117] "RemoveContainer" containerID="9d1084642e571e7a3618ec0ca85873d1a77efa55b6652003dffe2918cb33dba0"
Dec 05 17:31:05 crc kubenswrapper[4753]: E1205 17:31:05.726577 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2"
Dec 05 17:31:05 crc kubenswrapper[4753]: E1205 17:31:05.726784 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rzsdf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-zl7c6_openstack-operators(cca913d2-4dbb-4cd8-8575-4af52cc95501): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 17:31:05 crc kubenswrapper[4753]: E1205 17:31:05.728280 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" podUID="cca913d2-4dbb-4cd8-8575-4af52cc95501"
Dec 05 17:31:05 crc kubenswrapper[4753]: W1205 17:31:05.750650 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod269f75ec_a232_47f3_8cc8_e9e4c8e9717d.slice/crio-53e31ebd870c5f4fcb35ccd6ef9270a4a47d536154484f9416616ffdaa95d63c WatchSource:0}: Error finding container 53e31ebd870c5f4fcb35ccd6ef9270a4a47d536154484f9416616ffdaa95d63c: Status 404 returned error can't find the container with id 53e31ebd870c5f4fcb35ccd6ef9270a4a47d536154484f9416616ffdaa95d63c
Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.164973 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" event={"ID":"8e53ed9e-05c6-4a84-894d-85f427a53f72","Type":"ContainerStarted","Data":"5f9f50650d6424709533df7b08b0c8a4f8d58faa7822ee6e35424d164211678a"}
pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" event={"ID":"e2bab632-3631-4dcf-b337-12982b375999","Type":"ContainerStarted","Data":"0db2e8406462017500caa6f39c3242b1065a0223810614bdfcdf0ddb3dbfe197"} Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.175667 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" event={"ID":"2d850458-6add-4a44-b1c6-7dba1e8993ab","Type":"ContainerStarted","Data":"e39c64809238d9988c1f11f33ab85c3ea05d20e4f77f99e555a98970cf47a9a6"} Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.182343 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" event={"ID":"09be61f0-7174-4035-a8f9-315ca512dea4","Type":"ContainerStarted","Data":"38d5a7f5b07f1177e79aa34da4748dd2b88bf4bc60f12e16fb5165dc15bf5c07"} Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.183790 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" event={"ID":"6afd28dd-749e-409b-93ec-30cd85573a95","Type":"ContainerStarted","Data":"ba0706f631d07bdcc546969165fe5a6e64b0bf18a400f34210059326a4e417b8"} Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.184957 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" event={"ID":"269f75ec-a232-47f3-8cc8-e9e4c8e9717d","Type":"ContainerStarted","Data":"53e31ebd870c5f4fcb35ccd6ef9270a4a47d536154484f9416616ffdaa95d63c"} Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.204674 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" event={"ID":"a5f4bc41-be86-43bd-b9af-d8d8cfac644e","Type":"ContainerStarted","Data":"ed7996ddb6dcd8aa672fc7fdcb53dde97fc279586b96ff7daf67b88a71dae92b"} Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.214440 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" event={"ID":"9472f12d-6c74-422c-8bc9-76a2ca161b77","Type":"ContainerStarted","Data":"b77b04f6c4812d675178f87df7f7b81041d74c5eed7d8b8493531e2972297860"} Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.219788 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" event={"ID":"0d335585-bcd8-4ddf-a693-421d6d3bf6d2","Type":"ContainerStarted","Data":"40b9d0039a57a0d6d3a8537f5197b9ce330b595ce7b3c16c4b8962dbb17f6fe4"} Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.828338 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.829481 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " 
pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.834035 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-webhook-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:31:06 crc kubenswrapper[4753]: I1205 17:31:06.847960 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/ac0841fe-3cba-4397-bfec-67a9cbec6861-metrics-certs\") pod \"openstack-operator-controller-manager-6dd6c4f769-lq2pn\" (UID: \"ac0841fe-3cba-4397-bfec-67a9cbec6861\") " pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:31:07 crc kubenswrapper[4753]: I1205 17:31:07.035969 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:31:07 crc kubenswrapper[4753]: I1205 17:31:07.381321 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" event={"ID":"7b847f69-4008-4acf-bddf-e0ea5a07b6bd","Type":"ContainerStarted","Data":"fc9ca602545a7f1118af0b2a93ff9b3ceaf12be26f0eb5c3f2b13f6d826e2be1"} Dec 05 17:31:07 crc kubenswrapper[4753]: I1205 17:31:07.387466 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" event={"ID":"9e8cbdbf-2604-4e04-a56a-6f2175c09abe","Type":"ContainerStarted","Data":"5aefaab92f176e75c80310e408a708e0a63f21264a1d96bbb636908203388ec6"} Dec 05 17:31:07 crc kubenswrapper[4753]: I1205 17:31:07.389585 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" event={"ID":"2f396259-4eaa-465d-9674-9999d750b1f6","Type":"ContainerStarted","Data":"2b7b79da470c31f747b1ee9681a8560afb584ef2587962a1b7dadf21a6397850"} Dec 05 17:31:07 crc kubenswrapper[4753]: I1205 17:31:07.390900 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" event={"ID":"444bb95c-a503-40a1-a99e-64d04b3c8930","Type":"ContainerStarted","Data":"4883f0a3f25e4a9a77fdfeffe3c785aef103e322d42e527eb3e9a210811eca6d"} Dec 05 17:31:07 crc kubenswrapper[4753]: I1205 17:31:07.392108 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" event={"ID":"9712e3f3-fe07-4f19-b04f-6736375fd440","Type":"ContainerStarted","Data":"199150cb25dd78b4ae76c6990dcf6881e5d669953ff64e9ba2e833fdd0f2e4c8"} Dec 05 17:31:08 crc kubenswrapper[4753]: I1205 17:31:08.406999 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" event={"ID":"85fd7687-c296-460a-a2b2-3da36c97efe6","Type":"ContainerStarted","Data":"1e27354aa498e538a26adaa8a016c7fbd411da4d4b7296ac5fb71f3766d61bc5"} Dec 05 17:31:08 crc kubenswrapper[4753]: I1205 17:31:08.410962 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" 
event={"ID":"e485d825-a020-45b2-a642-bba12e1a5112","Type":"ContainerStarted","Data":"f639e0e987c5a54de2fed374285dff00d252448c797c4d7faa33801227100194"} Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.194837 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn"] Dec 05 17:31:10 crc kubenswrapper[4753]: E1205 17:31:10.281360 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" podUID="a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e" Dec 05 17:31:10 crc kubenswrapper[4753]: E1205 17:31:10.366541 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" podUID="304f4f1a-f42b-4904-9a77-9e26600eb591" Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.437462 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" event={"ID":"a5f4bc41-be86-43bd-b9af-d8d8cfac644e","Type":"ContainerStarted","Data":"7cd9934a37e1c0d61835dc336216af55976b110c9b3e2c5cc4912167c02b3ab4"} Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.438332 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.447592 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" event={"ID":"ac0841fe-3cba-4397-bfec-67a9cbec6861","Type":"ContainerStarted","Data":"da3260cee57ca2cc6c291377bf952c4c0bfd7c001e7eed5703f2548a2a7198c4"} Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.450049 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" event={"ID":"304f4f1a-f42b-4904-9a77-9e26600eb591","Type":"ContainerStarted","Data":"9fd27f030e471bbd21669b6b3073253b41d56c5d5522378d6637d202bf7543dc"} Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.463958 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" podStartSLOduration=2.876553236 podStartE2EDuration="37.463936512s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:35.355242485 +0000 UTC m=+1573.858349491" lastFinishedPulling="2025-12-05 17:31:09.942625761 +0000 UTC m=+1608.445732767" observedRunningTime="2025-12-05 17:31:10.46175195 +0000 UTC m=+1608.964858956" watchObservedRunningTime="2025-12-05 17:31:10.463936512 +0000 UTC m=+1608.967043518" Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.483800 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" event={"ID":"0d335585-bcd8-4ddf-a693-421d6d3bf6d2","Type":"ContainerStarted","Data":"86f7efd28ca062f0e33e58ea065d45ea6343cc434feeee6dae2bb8eaedf589fc"} Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.501947 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.501962 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" event={"ID":"a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e","Type":"ContainerStarted","Data":"a5517e731e891de70479316943820a9b0754485da5d9b29e8d0ea31eac69136f"} Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.504832 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" event={"ID":"269f75ec-a232-47f3-8cc8-e9e4c8e9717d","Type":"ContainerStarted","Data":"77f3cde09a601794ae3fc98de899b64af5630a5e31096e60753b5df7d2349d12"} Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.512310 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" event={"ID":"e2bab632-3631-4dcf-b337-12982b375999","Type":"ContainerStarted","Data":"8a1bb98e2b473a2c5605e75bb92f979f6bc48a1fdd9421c582237e019434c155"} Dec 05 17:31:10 crc kubenswrapper[4753]: I1205 17:31:10.578892 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" podStartSLOduration=3.9419486409999998 podStartE2EDuration="37.578875106s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.300767097 +0000 UTC m=+1574.803874103" lastFinishedPulling="2025-12-05 17:31:09.937693562 +0000 UTC m=+1608.440800568" observedRunningTime="2025-12-05 17:31:10.564105369 +0000 UTC m=+1609.067212395" watchObservedRunningTime="2025-12-05 17:31:10.578875106 +0000 UTC m=+1609.081982112" Dec 05 17:31:10 crc kubenswrapper[4753]: E1205 17:31:10.715724 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" podUID="ebc7e3cf-3701-42f9-a6ca-43f11424a0b3" Dec 05 17:31:10 crc kubenswrapper[4753]: E1205 17:31:10.958566 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" podUID="41186805-7b90-44a5-b6d6-fe4b6b4d9a79" Dec 05 17:31:11 crc kubenswrapper[4753]: E1205 17:31:11.374070 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" podUID="a4590a35-52c3-45a7-ba18-81d2db73c384" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.520674 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" event={"ID":"444bb95c-a503-40a1-a99e-64d04b3c8930","Type":"ContainerStarted","Data":"4995e62f3c69ff47daecb40b80060dfc80dc5b2f80bfc04206e7c47381ba19ed"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.520893 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" Dec 05 17:31:11 crc kubenswrapper[4753]: 
I1205 17:31:11.522276 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" event={"ID":"a4590a35-52c3-45a7-ba18-81d2db73c384","Type":"ContainerStarted","Data":"611fbac714cc18e5a18d5efc4affee7e3dbf05182e5cac20cd5c1d45850f0d8b"} Dec 05 17:31:11 crc kubenswrapper[4753]: E1205 17:31:11.523519 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" podUID="a4590a35-52c3-45a7-ba18-81d2db73c384" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.525177 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" event={"ID":"a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e","Type":"ContainerStarted","Data":"35c458e519778c379d876bf81e25ed530df2073e04657490d366494cfd296d36"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.525296 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.527000 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" event={"ID":"09be61f0-7174-4035-a8f9-315ca512dea4","Type":"ContainerStarted","Data":"9fb95e80612e6e2c22c868cd4d64f02fbf8cc2582d801b09b5e90163f79fd2c8"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.527199 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.530087 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.530440 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" event={"ID":"6afd28dd-749e-409b-93ec-30cd85573a95","Type":"ContainerStarted","Data":"a13021884c8ee418186b7e96d76ed56723bf05e9b990058c19a49c0f50d9badf"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.530643 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.532297 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" event={"ID":"2d850458-6add-4a44-b1c6-7dba1e8993ab","Type":"ContainerStarted","Data":"62f00893d5285ccb6dd5166e045a047bcfee93f385387de2be9aa69f0ac7d9ce"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.532475 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.532530 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.533638 4753 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" event={"ID":"41186805-7b90-44a5-b6d6-fe4b6b4d9a79","Type":"ContainerStarted","Data":"ed24bfb1a6514917f037ff2d3d3321674d770ccb97b3d3847a089c7749bcaa17"} Dec 05 17:31:11 crc kubenswrapper[4753]: E1205 17:31:11.534593 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"38.102.83.70:5001/openstack-k8s-operators/telemetry-operator:d41273755bc130d021645570cb35db3b5f04d199\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" podUID="41186805-7b90-44a5-b6d6-fe4b6b4d9a79" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.535611 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" event={"ID":"8e53ed9e-05c6-4a84-894d-85f427a53f72","Type":"ContainerStarted","Data":"2a90af36e5b8ed30be9b15c0c157c40ed647cafc5288bf0b016b935e7738ae3f"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.535760 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.535794 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.537655 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.538189 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" event={"ID":"85fd7687-c296-460a-a2b2-3da36c97efe6","Type":"ContainerStarted","Data":"025a84a4bcb74f5b97fb33b5a81ebbde833fbb075ba3eed07bfb00a117f4dadf"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.538271 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.539954 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" event={"ID":"304f4f1a-f42b-4904-9a77-9e26600eb591","Type":"ContainerStarted","Data":"5aae628f61f33c42001a01021089164cb39992fe296b40a4909d76d013718172"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.540345 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.541658 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" event={"ID":"ac0841fe-3cba-4397-bfec-67a9cbec6861","Type":"ContainerStarted","Data":"44063b810211290c50e049c1b193ae3e30f9a9a8d63c8197d9d4eaa95a594555"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.541986 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.543088 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" 
event={"ID":"ebc7e3cf-3701-42f9-a6ca-43f11424a0b3","Type":"ContainerStarted","Data":"832c5c6f99b5057d7e71b8f73c4bf9f0972ef528aa7191e7846174a34697dfe8"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.551255 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" event={"ID":"e485d825-a020-45b2-a642-bba12e1a5112","Type":"ContainerStarted","Data":"c9d027b21456a6f32ab1f18710362bd692550bf93547c3c987077812e66c8efa"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.551731 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.558645 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" event={"ID":"e2bab632-3631-4dcf-b337-12982b375999","Type":"ContainerStarted","Data":"9af67a8a5c323e30763ca827900ac027e59a9f1f4ad5a018d4280a69388ee83d"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.559025 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.561405 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" event={"ID":"9472f12d-6c74-422c-8bc9-76a2ca161b77","Type":"ContainerStarted","Data":"25c52cad45e84d9d921fe2b2a86c16545f7a88ae1542ae09c3b09fdad5b7fd75"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.561635 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.566471 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.570713 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" event={"ID":"9e8cbdbf-2604-4e04-a56a-6f2175c09abe","Type":"ContainerStarted","Data":"7a90a317e3dbe2e27a775d997301df3fb25c518d57d06552bd26fe64c3b28e8d"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.571365 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.574280 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" event={"ID":"2f396259-4eaa-465d-9674-9999d750b1f6","Type":"ContainerStarted","Data":"5dd02c7ef9cea9bafc197c4a092f8b4abdee437eed46c43796a9b0ff5c1d1a28"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.574954 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.576043 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" podStartSLOduration=3.869818665 podStartE2EDuration="37.576032315s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.345730096 +0000 UTC 
m=+1574.848837102" lastFinishedPulling="2025-12-05 17:31:10.051943746 +0000 UTC m=+1608.555050752" observedRunningTime="2025-12-05 17:31:11.551869823 +0000 UTC m=+1610.054976829" watchObservedRunningTime="2025-12-05 17:31:11.576032315 +0000 UTC m=+1610.079139321" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.581481 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.582809 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" event={"ID":"9712e3f3-fe07-4f19-b04f-6736375fd440","Type":"ContainerStarted","Data":"395151306cd92f4944a1c5f99313b93098d9989a70ad697c3b233e808cbfd67a"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.583782 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.587455 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.592591 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.593514 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" event={"ID":"7b847f69-4008-4acf-bddf-e0ea5a07b6bd","Type":"ContainerStarted","Data":"25b4b814b33b0b011358207ac4c7863cb587c1001dfc5de89c4cea22e018f5b3"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.596054 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.601728 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.605534 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" event={"ID":"269f75ec-a232-47f3-8cc8-e9e4c8e9717d","Type":"ContainerStarted","Data":"7fc96f95b65d1c9ccb0f9b80b8ad6c2e61c716d3d770e06770e3030d4143d346"} Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.605874 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.613801 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-hhh6x" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.614379 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" podStartSLOduration=3.42383934 podStartE2EDuration="38.614369217s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:35.860822282 +0000 UTC m=+1574.363929288" lastFinishedPulling="2025-12-05 17:31:11.051352159 +0000 UTC m=+1609.554459165" 
observedRunningTime="2025-12-05 17:31:11.578529226 +0000 UTC m=+1610.081636242" watchObservedRunningTime="2025-12-05 17:31:11.614369217 +0000 UTC m=+1610.117476223" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.625105 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-qpd7t" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.653008 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-vcc5x" podStartSLOduration=4.55072409 podStartE2EDuration="38.652988697s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:35.889576423 +0000 UTC m=+1574.392683429" lastFinishedPulling="2025-12-05 17:31:09.99184102 +0000 UTC m=+1608.494948036" observedRunningTime="2025-12-05 17:31:11.649820277 +0000 UTC m=+1610.152927283" watchObservedRunningTime="2025-12-05 17:31:11.652988697 +0000 UTC m=+1610.156095693" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.704772 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" podStartSLOduration=4.762273961 podStartE2EDuration="38.704757838s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.112093063 +0000 UTC m=+1574.615200069" lastFinishedPulling="2025-12-05 17:31:10.05457694 +0000 UTC m=+1608.557683946" observedRunningTime="2025-12-05 17:31:11.700461956 +0000 UTC m=+1610.203568962" watchObservedRunningTime="2025-12-05 17:31:11.704757838 +0000 UTC m=+1610.207864844" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.806675 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" podStartSLOduration=37.806655203 podStartE2EDuration="37.806655203s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:31:11.799234524 +0000 UTC m=+1610.302341530" watchObservedRunningTime="2025-12-05 17:31:11.806655203 +0000 UTC m=+1610.309762209" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.828609 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-pkzrf" podStartSLOduration=4.562469652 podStartE2EDuration="38.828593682s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:35.782279296 +0000 UTC m=+1574.285386312" lastFinishedPulling="2025-12-05 17:31:10.048403326 +0000 UTC m=+1608.551510342" observedRunningTime="2025-12-05 17:31:11.826468062 +0000 UTC m=+1610.329575068" watchObservedRunningTime="2025-12-05 17:31:11.828593682 +0000 UTC m=+1610.331700688" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.891776 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-dhjvc" podStartSLOduration=3.985074848 podStartE2EDuration="37.891759645s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.086895202 +0000 UTC m=+1574.590002198" lastFinishedPulling="2025-12-05 17:31:09.993579969 +0000 UTC m=+1608.496686995" observedRunningTime="2025-12-05 17:31:11.888363879 +0000 UTC m=+1610.391470905" 
watchObservedRunningTime="2025-12-05 17:31:11.891759645 +0000 UTC m=+1610.394866651" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.892444 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-kj4zh" podStartSLOduration=4.629906125 podStartE2EDuration="38.892436644s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:35.78456805 +0000 UTC m=+1574.287675056" lastFinishedPulling="2025-12-05 17:31:10.047098559 +0000 UTC m=+1608.550205575" observedRunningTime="2025-12-05 17:31:11.865750991 +0000 UTC m=+1610.368857997" watchObservedRunningTime="2025-12-05 17:31:11.892436644 +0000 UTC m=+1610.395543650" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.918751 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" podStartSLOduration=3.8013743140000003 podStartE2EDuration="38.918733086s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:35.768674392 +0000 UTC m=+1574.271781398" lastFinishedPulling="2025-12-05 17:31:10.886033164 +0000 UTC m=+1609.389140170" observedRunningTime="2025-12-05 17:31:11.911351408 +0000 UTC m=+1610.414458424" watchObservedRunningTime="2025-12-05 17:31:11.918733086 +0000 UTC m=+1610.421840082" Dec 05 17:31:11 crc kubenswrapper[4753]: I1205 17:31:11.941398 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" podStartSLOduration=4.7762222439999995 podStartE2EDuration="38.941383415s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:35.884583583 +0000 UTC m=+1574.387690589" lastFinishedPulling="2025-12-05 17:31:10.049744744 +0000 UTC m=+1608.552851760" observedRunningTime="2025-12-05 17:31:11.935335035 +0000 UTC m=+1610.438442041" watchObservedRunningTime="2025-12-05 17:31:11.941383415 +0000 UTC m=+1610.444490421" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.092065 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" podStartSLOduration=34.513325748 podStartE2EDuration="39.092045237s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:31:05.208415384 +0000 UTC m=+1603.711522380" lastFinishedPulling="2025-12-05 17:31:09.787134843 +0000 UTC m=+1608.290241869" observedRunningTime="2025-12-05 17:31:12.075720846 +0000 UTC m=+1610.578827852" watchObservedRunningTime="2025-12-05 17:31:12.092045237 +0000 UTC m=+1610.595152233" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.138235 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-9jsb5" podStartSLOduration=4.481283671 podStartE2EDuration="39.13821521s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:35.348418522 +0000 UTC m=+1573.851525528" lastFinishedPulling="2025-12-05 17:31:10.005350051 +0000 UTC m=+1608.508457067" observedRunningTime="2025-12-05 17:31:12.11554659 +0000 UTC m=+1610.618653596" watchObservedRunningTime="2025-12-05 17:31:12.13821521 +0000 UTC m=+1610.641322216" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.153622 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-7wzfq" podStartSLOduration=4.428197203 podStartE2EDuration="38.153603054s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.324116266 +0000 UTC m=+1574.827223272" lastFinishedPulling="2025-12-05 17:31:10.049522107 +0000 UTC m=+1608.552629123" observedRunningTime="2025-12-05 17:31:12.151639639 +0000 UTC m=+1610.654746645" watchObservedRunningTime="2025-12-05 17:31:12.153603054 +0000 UTC m=+1610.656710060" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.235860 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-72b7n" podStartSLOduration=4.28420738 podStartE2EDuration="38.235808194s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.098334135 +0000 UTC m=+1574.601441141" lastFinishedPulling="2025-12-05 17:31:10.049934949 +0000 UTC m=+1608.553041955" observedRunningTime="2025-12-05 17:31:12.183489947 +0000 UTC m=+1610.686596953" watchObservedRunningTime="2025-12-05 17:31:12.235808194 +0000 UTC m=+1610.738915200" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.236564 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-8fdbn" podStartSLOduration=4.288292214 podStartE2EDuration="38.236559175s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.100875086 +0000 UTC m=+1574.603982092" lastFinishedPulling="2025-12-05 17:31:10.049142037 +0000 UTC m=+1608.552249053" observedRunningTime="2025-12-05 17:31:12.223742153 +0000 UTC m=+1610.726849159" watchObservedRunningTime="2025-12-05 17:31:12.236559175 +0000 UTC m=+1610.739666181" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.295112 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" podStartSLOduration=34.292600729 podStartE2EDuration="38.295094157s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="2025-12-05 17:31:05.756443039 +0000 UTC m=+1604.259550045" lastFinishedPulling="2025-12-05 17:31:09.758936457 +0000 UTC m=+1608.262043473" observedRunningTime="2025-12-05 17:31:12.290544008 +0000 UTC m=+1610.793651014" watchObservedRunningTime="2025-12-05 17:31:12.295094157 +0000 UTC m=+1610.798201163" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.318389 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-n94z9" podStartSLOduration=4.647355697 podStartE2EDuration="39.318370934s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:35.320784772 +0000 UTC m=+1573.823891768" lastFinishedPulling="2025-12-05 17:31:09.991799999 +0000 UTC m=+1608.494907005" observedRunningTime="2025-12-05 17:31:12.31185374 +0000 UTC m=+1610.814960746" watchObservedRunningTime="2025-12-05 17:31:12.318370934 +0000 UTC m=+1610.821477940" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.611218 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" event={"ID":"ebc7e3cf-3701-42f9-a6ca-43f11424a0b3","Type":"ContainerStarted","Data":"9eb955722eef31edb51d16a8bb1bb52ada8e104155b1593d5c6042259eebd6a7"} Dec 05 17:31:12 crc 
kubenswrapper[4753]: I1205 17:31:12.616184 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.616300 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-s2rgf" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.617395 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-p5j4k" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.617461 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-g7c45" Dec 05 17:31:12 crc kubenswrapper[4753]: I1205 17:31:12.631529 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" podStartSLOduration=2.729507876 podStartE2EDuration="38.63151605s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.081307094 +0000 UTC m=+1574.584414100" lastFinishedPulling="2025-12-05 17:31:11.983315268 +0000 UTC m=+1610.486422274" observedRunningTime="2025-12-05 17:31:12.628534185 +0000 UTC m=+1611.131641191" watchObservedRunningTime="2025-12-05 17:31:12.63151605 +0000 UTC m=+1611.134623046" Dec 05 17:31:17 crc kubenswrapper[4753]: I1205 17:31:17.045666 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6dd6c4f769-lq2pn" Dec 05 17:31:18 crc kubenswrapper[4753]: E1205 17:31:18.722557 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" podUID="cca913d2-4dbb-4cd8-8575-4af52cc95501" Dec 05 17:31:20 crc kubenswrapper[4753]: I1205 17:31:20.154456 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-j6jtq" Dec 05 17:31:20 crc kubenswrapper[4753]: I1205 17:31:20.507482 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd462vzf" Dec 05 17:31:24 crc kubenswrapper[4753]: I1205 17:31:24.302718 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-k9647" Dec 05 17:31:24 crc kubenswrapper[4753]: I1205 17:31:24.480639 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-s6759" Dec 05 17:31:24 crc kubenswrapper[4753]: I1205 17:31:24.782471 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-l5jmt" Dec 05 17:31:27 crc kubenswrapper[4753]: I1205 17:31:27.779978 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" 
event={"ID":"41186805-7b90-44a5-b6d6-fe4b6b4d9a79","Type":"ContainerStarted","Data":"c1fab45f4392e6a6e455e3574ae5f20cda943c113bd5ea8441916d353653c971"} Dec 05 17:31:27 crc kubenswrapper[4753]: I1205 17:31:27.781291 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" Dec 05 17:31:27 crc kubenswrapper[4753]: I1205 17:31:27.783799 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" event={"ID":"a4590a35-52c3-45a7-ba18-81d2db73c384","Type":"ContainerStarted","Data":"9d9f800c2f83dbc8aad1cc3e2aefd65d9af84b8167f55a9ba4c63c7e339d4a73"} Dec 05 17:31:27 crc kubenswrapper[4753]: I1205 17:31:27.784961 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" Dec 05 17:31:27 crc kubenswrapper[4753]: I1205 17:31:27.800295 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" podStartSLOduration=4.11675182 podStartE2EDuration="53.800281062s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.112053742 +0000 UTC m=+1574.615160748" lastFinishedPulling="2025-12-05 17:31:25.795582984 +0000 UTC m=+1624.298689990" observedRunningTime="2025-12-05 17:31:27.799446078 +0000 UTC m=+1626.302553104" watchObservedRunningTime="2025-12-05 17:31:27.800281062 +0000 UTC m=+1626.303388068" Dec 05 17:31:27 crc kubenswrapper[4753]: I1205 17:31:27.814886 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" podStartSLOduration=3.817522495 podStartE2EDuration="54.814867713s" podCreationTimestamp="2025-12-05 17:30:33 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.112098543 +0000 UTC m=+1574.615205549" lastFinishedPulling="2025-12-05 17:31:27.109443731 +0000 UTC m=+1625.612550767" observedRunningTime="2025-12-05 17:31:27.814227205 +0000 UTC m=+1626.317334211" watchObservedRunningTime="2025-12-05 17:31:27.814867713 +0000 UTC m=+1626.317974729" Dec 05 17:31:28 crc kubenswrapper[4753]: I1205 17:31:28.979664 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:31:28 crc kubenswrapper[4753]: I1205 17:31:28.980021 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:31:33 crc kubenswrapper[4753]: I1205 17:31:33.844020 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" event={"ID":"cca913d2-4dbb-4cd8-8575-4af52cc95501","Type":"ContainerStarted","Data":"d693295c1a161665404b115d485ec1c41ce7a74feb673b1aee371fcb85dbb22d"} Dec 05 17:31:33 crc kubenswrapper[4753]: I1205 17:31:33.864539 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-zl7c6" 
podStartSLOduration=2.9853332 podStartE2EDuration="59.86451772s" podCreationTimestamp="2025-12-05 17:30:34 +0000 UTC" firstStartedPulling="2025-12-05 17:30:36.302084964 +0000 UTC m=+1574.805191970" lastFinishedPulling="2025-12-05 17:31:33.181269464 +0000 UTC m=+1631.684376490" observedRunningTime="2025-12-05 17:31:33.85848759 +0000 UTC m=+1632.361594606" watchObservedRunningTime="2025-12-05 17:31:33.86451772 +0000 UTC m=+1632.367624746" Dec 05 17:31:34 crc kubenswrapper[4753]: I1205 17:31:34.640561 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hf596" Dec 05 17:31:35 crc kubenswrapper[4753]: I1205 17:31:35.095949 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-75c997498-r5zvk" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.415683 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-t4hjs"] Dec 05 17:31:51 crc kubenswrapper[4753]: E1205 17:31:51.417680 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" containerName="extract-utilities" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.417698 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" containerName="extract-utilities" Dec 05 17:31:51 crc kubenswrapper[4753]: E1205 17:31:51.417725 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" containerName="registry-server" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.417731 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" containerName="registry-server" Dec 05 17:31:51 crc kubenswrapper[4753]: E1205 17:31:51.417759 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" containerName="extract-content" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.417766 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" containerName="extract-content" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.417912 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="df9c3114-8e90-4129-9a46-a99986adccd2" containerName="registry-server" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.418733 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.422085 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.422729 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-gk442" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.427552 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-t4hjs"] Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.428012 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.428530 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.472775 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ndvfw"] Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.474470 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.482746 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.491122 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ndvfw"] Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.582714 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc6ed3-d663-46b8-bd6a-2985905fceb8-config\") pod \"dnsmasq-dns-675f4bcbfc-t4hjs\" (UID: \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.582770 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-config\") pod \"dnsmasq-dns-78dd6ddcc-ndvfw\" (UID: \"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.582826 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-ndvfw\" (UID: \"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.582933 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfzzp\" (UniqueName: \"kubernetes.io/projected/70bc6ed3-d663-46b8-bd6a-2985905fceb8-kube-api-access-jfzzp\") pod \"dnsmasq-dns-675f4bcbfc-t4hjs\" (UID: \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.582986 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l6cx\" (UniqueName: \"kubernetes.io/projected/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-kube-api-access-8l6cx\") pod \"dnsmasq-dns-78dd6ddcc-ndvfw\" (UID: \"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9\") " 
pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.684830 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-ndvfw\" (UID: \"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.684904 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfzzp\" (UniqueName: \"kubernetes.io/projected/70bc6ed3-d663-46b8-bd6a-2985905fceb8-kube-api-access-jfzzp\") pod \"dnsmasq-dns-675f4bcbfc-t4hjs\" (UID: \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.684930 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l6cx\" (UniqueName: \"kubernetes.io/projected/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-kube-api-access-8l6cx\") pod \"dnsmasq-dns-78dd6ddcc-ndvfw\" (UID: \"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.685019 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc6ed3-d663-46b8-bd6a-2985905fceb8-config\") pod \"dnsmasq-dns-675f4bcbfc-t4hjs\" (UID: \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.685055 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-config\") pod \"dnsmasq-dns-78dd6ddcc-ndvfw\" (UID: \"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.686234 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc6ed3-d663-46b8-bd6a-2985905fceb8-config\") pod \"dnsmasq-dns-675f4bcbfc-t4hjs\" (UID: \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.686320 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-config\") pod \"dnsmasq-dns-78dd6ddcc-ndvfw\" (UID: \"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.686424 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-ndvfw\" (UID: \"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.719044 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l6cx\" (UniqueName: \"kubernetes.io/projected/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-kube-api-access-8l6cx\") pod \"dnsmasq-dns-78dd6ddcc-ndvfw\" (UID: \"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.724824 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jfzzp\" (UniqueName: \"kubernetes.io/projected/70bc6ed3-d663-46b8-bd6a-2985905fceb8-kube-api-access-jfzzp\") pod \"dnsmasq-dns-675f4bcbfc-t4hjs\" (UID: \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\") " pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.745431 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:31:51 crc kubenswrapper[4753]: I1205 17:31:51.792506 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" Dec 05 17:31:52 crc kubenswrapper[4753]: I1205 17:31:52.256171 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-t4hjs"] Dec 05 17:31:52 crc kubenswrapper[4753]: I1205 17:31:52.334757 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ndvfw"] Dec 05 17:31:52 crc kubenswrapper[4753]: W1205 17:31:52.338045 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37bad1d4_a7ff_49ef_bfa0_70d3c939b5d9.slice/crio-696d72b151fcfb1eff7e8e55085b9b348d324038f4c1038d1a1f7c212637c023 WatchSource:0}: Error finding container 696d72b151fcfb1eff7e8e55085b9b348d324038f4c1038d1a1f7c212637c023: Status 404 returned error can't find the container with id 696d72b151fcfb1eff7e8e55085b9b348d324038f4c1038d1a1f7c212637c023 Dec 05 17:31:52 crc kubenswrapper[4753]: I1205 17:31:52.999707 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" event={"ID":"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9","Type":"ContainerStarted","Data":"696d72b151fcfb1eff7e8e55085b9b348d324038f4c1038d1a1f7c212637c023"} Dec 05 17:31:53 crc kubenswrapper[4753]: I1205 17:31:53.001501 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" event={"ID":"70bc6ed3-d663-46b8-bd6a-2985905fceb8","Type":"ContainerStarted","Data":"cacb634bd9738be217523958802e4da5573ce3a570491034a301997b5a53c13c"} Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.631395 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-t4hjs"] Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.656176 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-vznhm"] Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.659480 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.672724 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-vznhm"] Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.730315 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8lr4\" (UniqueName: \"kubernetes.io/projected/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-kube-api-access-g8lr4\") pod \"dnsmasq-dns-666b6646f7-vznhm\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.730364 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-config\") pod \"dnsmasq-dns-666b6646f7-vznhm\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.730425 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-vznhm\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.832002 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-vznhm\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.832070 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8lr4\" (UniqueName: \"kubernetes.io/projected/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-kube-api-access-g8lr4\") pod \"dnsmasq-dns-666b6646f7-vznhm\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.832108 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-config\") pod \"dnsmasq-dns-666b6646f7-vznhm\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.833798 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-config\") pod \"dnsmasq-dns-666b6646f7-vznhm\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.833993 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-vznhm\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.864298 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8lr4\" (UniqueName: 
\"kubernetes.io/projected/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-kube-api-access-g8lr4\") pod \"dnsmasq-dns-666b6646f7-vznhm\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.897779 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ndvfw"] Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.915867 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rchhw"] Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.917768 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.923227 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rchhw"] Dec 05 17:31:54 crc kubenswrapper[4753]: I1205 17:31:54.990912 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.041795 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2sgt\" (UniqueName: \"kubernetes.io/projected/30fe3d68-2fa2-416b-abac-b804b056a8af-kube-api-access-v2sgt\") pod \"dnsmasq-dns-57d769cc4f-rchhw\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.041857 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-config\") pod \"dnsmasq-dns-57d769cc4f-rchhw\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.041916 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-rchhw\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.144866 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-rchhw\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.145885 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-rchhw\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.145988 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2sgt\" (UniqueName: \"kubernetes.io/projected/30fe3d68-2fa2-416b-abac-b804b056a8af-kube-api-access-v2sgt\") pod \"dnsmasq-dns-57d769cc4f-rchhw\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.146022 4753 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-config\") pod \"dnsmasq-dns-57d769cc4f-rchhw\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.146702 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-config\") pod \"dnsmasq-dns-57d769cc4f-rchhw\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.164790 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2sgt\" (UniqueName: \"kubernetes.io/projected/30fe3d68-2fa2-416b-abac-b804b056a8af-kube-api-access-v2sgt\") pod \"dnsmasq-dns-57d769cc4f-rchhw\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.256182 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.311517 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-vznhm"] Dec 05 17:31:55 crc kubenswrapper[4753]: W1205 17:31:55.324577 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod22d4d35e_da87_4f57_a984_0bd8a4b10a3f.slice/crio-34244e18449a7b1dfec03911f275735a658e5e78ae586ab4f95c0c8f46d7e8b3 WatchSource:0}: Error finding container 34244e18449a7b1dfec03911f275735a658e5e78ae586ab4f95c0c8f46d7e8b3: Status 404 returned error can't find the container with id 34244e18449a7b1dfec03911f275735a658e5e78ae586ab4f95c0c8f46d7e8b3 Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.568957 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rchhw"] Dec 05 17:31:55 crc kubenswrapper[4753]: W1205 17:31:55.572105 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod30fe3d68_2fa2_416b_abac_b804b056a8af.slice/crio-6c45f8bbde4ee5e19bf5eff6b7eebcf0610dcb40a5ad058f9094e0fb81bf2967 WatchSource:0}: Error finding container 6c45f8bbde4ee5e19bf5eff6b7eebcf0610dcb40a5ad058f9094e0fb81bf2967: Status 404 returned error can't find the container with id 6c45f8bbde4ee5e19bf5eff6b7eebcf0610dcb40a5ad058f9094e0fb81bf2967 Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.758358 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.759559 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.764253 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.764303 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.764300 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.764233 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.764509 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-lqvgb" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.764554 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.765071 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.773168 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856442 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856490 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856558 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lskb5\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-kube-api-access-lskb5\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856597 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-config-data\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856613 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856645 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" 
(UniqueName: \"kubernetes.io/downward-api/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-pod-info\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856729 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-server-conf\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856769 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856789 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856805 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.856853 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.957848 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.957893 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.957918 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.957972 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: 
\"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.957998 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.958018 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.958054 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lskb5\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-kube-api-access-lskb5\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.958071 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.958086 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-pod-info\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.958101 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-config-data\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.958172 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-server-conf\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.958464 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.959111 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.959239 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.959274 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-config-data\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.960535 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-server-conf\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.962994 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.963028 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b0f1c3c4ab4522717d685dbc018080e196fc39755508fe7e89724afa9df5553f/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.963551 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-pod-info\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.964655 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.964747 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.975408 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lskb5\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-kube-api-access-lskb5\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc kubenswrapper[4753]: I1205 17:31:55.989139 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:55 crc 
kubenswrapper[4753]: I1205 17:31:55.999516 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") pod \"rabbitmq-server-0\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " pod="openstack/rabbitmq-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.046452 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.084679 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" event={"ID":"30fe3d68-2fa2-416b-abac-b804b056a8af","Type":"ContainerStarted","Data":"6c45f8bbde4ee5e19bf5eff6b7eebcf0610dcb40a5ad058f9094e0fb81bf2967"} Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.084746 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.084764 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" event={"ID":"22d4d35e-da87-4f57-a984-0bd8a4b10a3f","Type":"ContainerStarted","Data":"34244e18449a7b1dfec03911f275735a658e5e78ae586ab4f95c0c8f46d7e8b3"} Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.084863 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.089590 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.090164 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-js6jz" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.090284 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.090437 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.090546 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.090670 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.090722 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.090817 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.175897 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m27l\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-kube-api-access-5m27l\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.175957 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.176012 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.176053 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.176162 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f5652c22-3bf2-454d-a4cf-fd0378f133b8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.176275 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.176305 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.176331 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.176363 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.176392 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.176418 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f5652c22-3bf2-454d-a4cf-fd0378f133b8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.286483 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287056 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287099 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287133 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f5652c22-3bf2-454d-a4cf-fd0378f133b8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287235 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m27l\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-kube-api-access-5m27l\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287272 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287306 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287337 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287386 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f5652c22-3bf2-454d-a4cf-fd0378f133b8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287499 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.287526 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.288653 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.288991 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.293037 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.294226 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.300092 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.301129 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f5652c22-3bf2-454d-a4cf-fd0378f133b8-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.301950 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.302583 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.324817 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f5652c22-3bf2-454d-a4cf-fd0378f133b8-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.348526 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m27l\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-kube-api-access-5m27l\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.361470 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.361507 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/368f5579209aa91fd91b7b3687bd307acc8a6215d480045ca9a7d2574e8a3831/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.396793 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") pod \"rabbitmq-cell1-server-0\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.532884 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:31:56 crc kubenswrapper[4753]: I1205 17:31:56.755075 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.168872 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.572608 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.574544 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.583694 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-pct9r" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.583893 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.584006 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.584755 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.590004 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.590450 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.614511 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9df69769-e394-444f-b6e2-e788e989fe92-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.614567 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9df69769-e394-444f-b6e2-e788e989fe92-kolla-config\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.614589 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9df69769-e394-444f-b6e2-e788e989fe92-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.614624 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9df69769-e394-444f-b6e2-e788e989fe92-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.614640 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9df69769-e394-444f-b6e2-e788e989fe92-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.614658 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9df69769-e394-444f-b6e2-e788e989fe92-config-data-default\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.614688 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-cbabc738-5093-4560-b066-251be4a88020\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbabc738-5093-4560-b066-251be4a88020\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.614717 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vc59t\" (UniqueName: \"kubernetes.io/projected/9df69769-e394-444f-b6e2-e788e989fe92-kube-api-access-vc59t\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.716332 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9df69769-e394-444f-b6e2-e788e989fe92-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.716403 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9df69769-e394-444f-b6e2-e788e989fe92-kolla-config\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.716429 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9df69769-e394-444f-b6e2-e788e989fe92-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.716486 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9df69769-e394-444f-b6e2-e788e989fe92-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.716504 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9df69769-e394-444f-b6e2-e788e989fe92-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.716572 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9df69769-e394-444f-b6e2-e788e989fe92-config-data-default\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.716903 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-cbabc738-5093-4560-b066-251be4a88020\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbabc738-5093-4560-b066-251be4a88020\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.716945 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-vc59t\" (UniqueName: \"kubernetes.io/projected/9df69769-e394-444f-b6e2-e788e989fe92-kube-api-access-vc59t\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.717276 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/9df69769-e394-444f-b6e2-e788e989fe92-kolla-config\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.717798 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/9df69769-e394-444f-b6e2-e788e989fe92-config-data-default\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.721812 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9df69769-e394-444f-b6e2-e788e989fe92-operator-scripts\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.730042 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/9df69769-e394-444f-b6e2-e788e989fe92-config-data-generated\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.734844 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.735077 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-cbabc738-5093-4560-b066-251be4a88020\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbabc738-5093-4560-b066-251be4a88020\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2c75ab8bfaa61d85ca1aba15423c2401e8f31a37d7b441408714789d95d23176/globalmount\"" pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.740353 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9df69769-e394-444f-b6e2-e788e989fe92-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.742741 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/9df69769-e394-444f-b6e2-e788e989fe92-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.755978 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vc59t\" (UniqueName: \"kubernetes.io/projected/9df69769-e394-444f-b6e2-e788e989fe92-kube-api-access-vc59t\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.784789 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-cbabc738-5093-4560-b066-251be4a88020\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-cbabc738-5093-4560-b066-251be4a88020\") pod \"openstack-galera-0\" (UID: \"9df69769-e394-444f-b6e2-e788e989fe92\") " pod="openstack/openstack-galera-0" Dec 05 17:31:57 crc kubenswrapper[4753]: I1205 17:31:57.959560 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.780210 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.793435 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.799381 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.799472 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.799891 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.799908 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-z7bsd" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.833370 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.960025 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5fzj\" (UniqueName: \"kubernetes.io/projected/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-kube-api-access-v5fzj\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.960088 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.960119 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.960161 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.960194 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-5630ed05-2b1c-420b-b615-51c2b8b604e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5630ed05-2b1c-420b-b615-51c2b8b604e5\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.960227 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.960248 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.960301 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.978897 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:31:58 crc kubenswrapper[4753]: I1205 17:31:58.978948 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.061438 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-5630ed05-2b1c-420b-b615-51c2b8b604e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5630ed05-2b1c-420b-b615-51c2b8b604e5\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.062188 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.062225 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.062390 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.062537 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5fzj\" (UniqueName: \"kubernetes.io/projected/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-kube-api-access-v5fzj\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.062684 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" 
(UniqueName: \"kubernetes.io/configmap/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.062717 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.062876 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.063450 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.063722 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.064116 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.066694 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.072664 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.072923 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.100275 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.100319 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-5630ed05-2b1c-420b-b615-51c2b8b604e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5630ed05-2b1c-420b-b615-51c2b8b604e5\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/92e0d6160b8d702928a8328f3c18577005d308f0498f199f3f6d945a56a4b6d8/globalmount\"" pod="openstack/openstack-cell1-galera-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.104691 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5fzj\" (UniqueName: \"kubernetes.io/projected/4c8d8a7a-38bd-49d9-8f25-5495c32462bc-kube-api-access-v5fzj\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.144498 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"]
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.145727 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.152859 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-b55vx"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.153119 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.153308 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.163383 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8978936d-d71e-4840-9cc4-666746ebeecf-kolla-config\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.163459 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/8978936d-d71e-4840-9cc4-666746ebeecf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.163505 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dthwt\" (UniqueName: \"kubernetes.io/projected/8978936d-d71e-4840-9cc4-666746ebeecf-kube-api-access-dthwt\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.163539 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8978936d-d71e-4840-9cc4-666746ebeecf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.163558 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8978936d-d71e-4840-9cc4-666746ebeecf-config-data\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.183264 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.264964 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dthwt\" (UniqueName: \"kubernetes.io/projected/8978936d-d71e-4840-9cc4-666746ebeecf-kube-api-access-dthwt\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.265053 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8978936d-d71e-4840-9cc4-666746ebeecf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.265099 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8978936d-d71e-4840-9cc4-666746ebeecf-config-data\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.265129 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8978936d-d71e-4840-9cc4-666746ebeecf-kolla-config\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.265309 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/8978936d-d71e-4840-9cc4-666746ebeecf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.266296 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8978936d-d71e-4840-9cc4-666746ebeecf-config-data\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.266567 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/8978936d-d71e-4840-9cc4-666746ebeecf-kolla-config\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.268547 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-5630ed05-2b1c-420b-b615-51c2b8b604e5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-5630ed05-2b1c-420b-b615-51c2b8b604e5\") pod \"openstack-cell1-galera-0\" (UID: \"4c8d8a7a-38bd-49d9-8f25-5495c32462bc\") " pod="openstack/openstack-cell1-galera-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.270096 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/8978936d-d71e-4840-9cc4-666746ebeecf-memcached-tls-certs\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.273850 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8978936d-d71e-4840-9cc4-666746ebeecf-combined-ca-bundle\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.289915 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dthwt\" (UniqueName: \"kubernetes.io/projected/8978936d-d71e-4840-9cc4-666746ebeecf-kube-api-access-dthwt\") pod \"memcached-0\" (UID: \"8978936d-d71e-4840-9cc4-666746ebeecf\") " pod="openstack/memcached-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.427498 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0"
Dec 05 17:31:59 crc kubenswrapper[4753]: I1205 17:31:59.510113 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0"
Dec 05 17:32:00 crc kubenswrapper[4753]: I1205 17:32:00.593665 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 17:32:00 crc kubenswrapper[4753]: I1205 17:32:00.594671 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 17:32:00 crc kubenswrapper[4753]: I1205 17:32:00.599816 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-nq4bj"
Dec 05 17:32:00 crc kubenswrapper[4753]: I1205 17:32:00.603105 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2vzc\" (UniqueName: \"kubernetes.io/projected/28078f95-1316-46c1-9dda-9912561aa4e4-kube-api-access-q2vzc\") pod \"kube-state-metrics-0\" (UID: \"28078f95-1316-46c1-9dda-9912561aa4e4\") " pod="openstack/kube-state-metrics-0"
Dec 05 17:32:00 crc kubenswrapper[4753]: I1205 17:32:00.606627 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 17:32:00 crc kubenswrapper[4753]: I1205 17:32:00.707261 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2vzc\" (UniqueName: \"kubernetes.io/projected/28078f95-1316-46c1-9dda-9912561aa4e4-kube-api-access-q2vzc\") pod \"kube-state-metrics-0\" (UID: \"28078f95-1316-46c1-9dda-9912561aa4e4\") " pod="openstack/kube-state-metrics-0"
Dec 05 17:32:00 crc kubenswrapper[4753]: I1205 17:32:00.748660 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2vzc\" (UniqueName: \"kubernetes.io/projected/28078f95-1316-46c1-9dda-9912561aa4e4-kube-api-access-q2vzc\") pod \"kube-state-metrics-0\" (UID: \"28078f95-1316-46c1-9dda-9912561aa4e4\") " pod="openstack/kube-state-metrics-0"
Dec 05 17:32:00 crc kubenswrapper[4753]: I1205 17:32:00.931678 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.261853 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.263475 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.269705 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-web-config"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.269791 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-generated"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.269910 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-cluster-tls-config"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.270019 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"alertmanager-metric-storage-tls-assets-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.270098 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-alertmanager-dockercfg-cbgt5"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.293266 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"]
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.322929 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f409c39c-6d5a-4950-bd92-2ab8a26ad831-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.322988 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f409c39c-6d5a-4950-bd92-2ab8a26ad831-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.323025 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/f409c39c-6d5a-4950-bd92-2ab8a26ad831-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.323045 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f409c39c-6d5a-4950-bd92-2ab8a26ad831-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.323093 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2xsl\" (UniqueName: \"kubernetes.io/projected/f409c39c-6d5a-4950-bd92-2ab8a26ad831-kube-api-access-k2xsl\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.323117 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f409c39c-6d5a-4950-bd92-2ab8a26ad831-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.323162 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/f409c39c-6d5a-4950-bd92-2ab8a26ad831-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.424406 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2xsl\" (UniqueName: \"kubernetes.io/projected/f409c39c-6d5a-4950-bd92-2ab8a26ad831-kube-api-access-k2xsl\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.424520 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f409c39c-6d5a-4950-bd92-2ab8a26ad831-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.424553 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/f409c39c-6d5a-4950-bd92-2ab8a26ad831-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.425244 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"alertmanager-metric-storage-db\" (UniqueName: \"kubernetes.io/empty-dir/f409c39c-6d5a-4950-bd92-2ab8a26ad831-alertmanager-metric-storage-db\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.425449 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f409c39c-6d5a-4950-bd92-2ab8a26ad831-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.425490 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f409c39c-6d5a-4950-bd92-2ab8a26ad831-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.425527 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/f409c39c-6d5a-4950-bd92-2ab8a26ad831-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.425545 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f409c39c-6d5a-4950-bd92-2ab8a26ad831-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.428746 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/f409c39c-6d5a-4950-bd92-2ab8a26ad831-config-volume\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.443138 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/f409c39c-6d5a-4950-bd92-2ab8a26ad831-tls-assets\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.444915 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/f409c39c-6d5a-4950-bd92-2ab8a26ad831-config-out\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.447122 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/f409c39c-6d5a-4950-bd92-2ab8a26ad831-web-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.447788 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/f409c39c-6d5a-4950-bd92-2ab8a26ad831-cluster-tls-config\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.450307 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2xsl\" (UniqueName: \"kubernetes.io/projected/f409c39c-6d5a-4950-bd92-2ab8a26ad831-kube-api-access-k2xsl\") pod \"alertmanager-metric-storage-0\" (UID: \"f409c39c-6d5a-4950-bd92-2ab8a26ad831\") " pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.588363 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/alertmanager-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.917701 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.920409 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.923697 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.923913 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.924092 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.924261 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-44qtk"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.924392 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.932892 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 05 17:32:01 crc kubenswrapper[4753]: I1205 17:32:01.959950 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.034570 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.034626 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.034711 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r654f\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-kube-api-access-r654f\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.034773 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.034819 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-config\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.034852 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c44e1a93-e233-46a2-b18a-e6c8c396a394-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.034879 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.034916 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c44e1a93-e233-46a2-b18a-e6c8c396a394-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.136476 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.136545 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.136588 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r654f\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-kube-api-access-r654f\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.136620 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.136646 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-config\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.136687 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c44e1a93-e233-46a2-b18a-e6c8c396a394-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.136737 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.136791 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c44e1a93-e233-46a2-b18a-e6c8c396a394-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.137658 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c44e1a93-e233-46a2-b18a-e6c8c396a394-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.141379 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.141591 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.142285 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.142321 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/041b3d1c919a392dd8ecbf05ce919b761f2a980688ec2105618d552148562637/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.151055 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.153983 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c44e1a93-e233-46a2-b18a-e6c8c396a394-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.160085 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-config\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.160232 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r654f\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-kube-api-access-r654f\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.197693 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") pod \"prometheus-metric-storage-0\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:02 crc kubenswrapper[4753]: I1205 17:32:02.246258 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:32:03 crc kubenswrapper[4753]: I1205 17:32:03.176036 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f5652c22-3bf2-454d-a4cf-fd0378f133b8","Type":"ContainerStarted","Data":"ca2bd45670cff0f53216a3ec6be8bf66adf722857e658a5c60d7a89c6e5d203a"}
Dec 05 17:32:03 crc kubenswrapper[4753]: I1205 17:32:03.177255 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"36088f6e-7c3c-4fee-918e-e1ee91bf6b33","Type":"ContainerStarted","Data":"d8c9c26ee97b7977a5c178567c872a01fb48321a9311a3c46f2d8d115c3bf824"}
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.370989 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8m7cw"]
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.378490 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.386044 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-qhphp"]
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.386719 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.386803 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.386940 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-98gl4"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.390345 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.407000 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8m7cw"]
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.436847 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-qhphp"]
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.481197 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-combined-ca-bundle\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.481268 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cltgk\" (UniqueName: \"kubernetes.io/projected/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-kube-api-access-cltgk\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.481310 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-var-log-ovn\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.481368 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-var-run-ovn\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.481404 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-var-run\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.481425 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-scripts\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.481463 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-ovn-controller-tls-certs\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583390 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-ovn-controller-tls-certs\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583455 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-etc-ovs\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583493 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-var-lib\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583523 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/284db7f3-ca89-447a-90eb-487d43e49f7d-scripts\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583548 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-combined-ca-bundle\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583698 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-np796\" (UniqueName: \"kubernetes.io/projected/284db7f3-ca89-447a-90eb-487d43e49f7d-kube-api-access-np796\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583777 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cltgk\" (UniqueName: \"kubernetes.io/projected/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-kube-api-access-cltgk\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583803 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-var-log-ovn\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583867 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-var-run-ovn\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.583902 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-var-run\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.584111 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-var-run\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.584179 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-var-log\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.584216 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-scripts\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.584932 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-var-log-ovn\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.585104 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-var-run-ovn\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.585359 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-var-run\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.588828 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-combined-ca-bundle\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.589998 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-scripts\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.590447 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-ovn-controller-tls-certs\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.607778 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cltgk\" (UniqueName: \"kubernetes.io/projected/9f33836c-96c6-4da3-b2d6-e9c12631f2b4-kube-api-access-cltgk\") pod \"ovn-controller-8m7cw\" (UID: \"9f33836c-96c6-4da3-b2d6-e9c12631f2b4\") " pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.685860 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-etc-ovs\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.685932 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-var-lib\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.685972 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/284db7f3-ca89-447a-90eb-487d43e49f7d-scripts\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.686014 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-np796\" (UniqueName: \"kubernetes.io/projected/284db7f3-ca89-447a-90eb-487d43e49f7d-kube-api-access-np796\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.686131 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-var-run\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.686179 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-var-log\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.686213 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-etc-ovs\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.686301 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-var-run\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.686445 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-var-log\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.688242 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/284db7f3-ca89-447a-90eb-487d43e49f7d-scripts\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.688405 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/284db7f3-ca89-447a-90eb-487d43e49f7d-var-lib\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.705579 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-np796\" (UniqueName: \"kubernetes.io/projected/284db7f3-ca89-447a-90eb-487d43e49f7d-kube-api-access-np796\") pod \"ovn-controller-ovs-qhphp\" (UID: \"284db7f3-ca89-447a-90eb-487d43e49f7d\") " pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.720998 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8m7cw"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.726350 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.727783 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.728233 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-qhphp"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.730327 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-6sbz7"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.730340 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.730727 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.767292 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.767432 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.776379 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.895197 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.896266 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8bf8014d-31e7-4cb4-8a25-c70f11b92aba\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8bf8014d-31e7-4cb4-8a25-c70f11b92aba\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.896320 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.896697 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.896910 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mks7\" (UniqueName: \"kubernetes.io/projected/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-kube-api-access-8mks7\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.897051 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.897131 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-config\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.897200 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.998622 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-config\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.998681 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.998758 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.998797 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8bf8014d-31e7-4cb4-8a25-c70f11b92aba\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8bf8014d-31e7-4cb4-8a25-c70f11b92aba\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.998826 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.998874 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.998904 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mks7\" (UniqueName: \"kubernetes.io/projected/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-kube-api-access-8mks7\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:04 crc kubenswrapper[4753]: I1205 17:32:04.998950 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.000269 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-config\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.001004 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.001662 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.002896 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.002929 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8bf8014d-31e7-4cb4-8a25-c70f11b92aba\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8bf8014d-31e7-4cb4-8a25-c70f11b92aba\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2a3068535026a417cdfcb9736772f394f07840b827eb2c956312ba3d030aa889/globalmount\"" pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.003605 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.005052 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.005588 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.017423 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mks7\" (UniqueName: \"kubernetes.io/projected/f8c13e8e-fec9-49e2-a2b0-5ca0473d2469-kube-api-access-8mks7\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.055418 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8bf8014d-31e7-4cb4-8a25-c70f11b92aba\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8bf8014d-31e7-4cb4-8a25-c70f11b92aba\") pod \"ovsdbserver-nb-0\" (UID: \"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469\") " pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:05 crc kubenswrapper[4753]: I1205 17:32:05.086735 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.485961 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.501035 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.502164 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"]
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.506029 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.506257 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.506350 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.507569 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-wfjfr"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.692318 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/09e6b220-0a70-4359-93f4-4450b2e458c8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.692394 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/09e6b220-0a70-4359-93f4-4450b2e458c8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.692618 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e6b220-0a70-4359-93f4-4450b2e458c8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.692669 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/09e6b220-0a70-4359-93f4-4450b2e458c8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.692706 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/09e6b220-0a70-4359-93f4-4450b2e458c8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.692996 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-ca96a219-ea25-4201-b27e-47de77d877c5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca96a219-ea25-4201-b27e-47de77d877c5\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.693098 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvjlc\" (UniqueName: \"kubernetes.io/projected/09e6b220-0a70-4359-93f4-4450b2e458c8-kube-api-access-bvjlc\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.693213 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e6b220-0a70-4359-93f4-4450b2e458c8-config\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.794517 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/09e6b220-0a70-4359-93f4-4450b2e458c8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.794575 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/09e6b220-0a70-4359-93f4-4450b2e458c8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.794648 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-ca96a219-ea25-4201-b27e-47de77d877c5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca96a219-ea25-4201-b27e-47de77d877c5\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.794681 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvjlc\" (UniqueName: \"kubernetes.io/projected/09e6b220-0a70-4359-93f4-4450b2e458c8-kube-api-access-bvjlc\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.794715 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e6b220-0a70-4359-93f4-4450b2e458c8-config\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.794752 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/09e6b220-0a70-4359-93f4-4450b2e458c8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.794790 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/09e6b220-0a70-4359-93f4-4450b2e458c8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.794850 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e6b220-0a70-4359-93f4-4450b2e458c8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.795869 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e6b220-0a70-4359-93f4-4450b2e458c8-config\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.796174 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/09e6b220-0a70-4359-93f4-4450b2e458c8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.796420 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/09e6b220-0a70-4359-93f4-4450b2e458c8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.798267 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.798310 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-ca96a219-ea25-4201-b27e-47de77d877c5\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca96a219-ea25-4201-b27e-47de77d877c5\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/3b833afea68f1825a3471e0057fb34f7f04f7d60274edb1554a9dd4e2d762d91/globalmount\"" pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.801408 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09e6b220-0a70-4359-93f4-4450b2e458c8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.802831 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/09e6b220-0a70-4359-93f4-4450b2e458c8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.811401 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/09e6b220-0a70-4359-93f4-4450b2e458c8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.818201 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvjlc\" (UniqueName: \"kubernetes.io/projected/09e6b220-0a70-4359-93f4-4450b2e458c8-kube-api-access-bvjlc\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0"
Dec 05 17:32:09 crc kubenswrapper[4753]: I1205 17:32:09.835455 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-ca96a219-ea25-4201-b27e-47de77d877c5\" (UniqueName:
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-ca96a219-ea25-4201-b27e-47de77d877c5\") pod \"ovsdbserver-sb-0\" (UID: \"09e6b220-0a70-4359-93f4-4450b2e458c8\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:32:10 crc kubenswrapper[4753]: I1205 17:32:10.124195 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.819242 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d"] Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.820793 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.830942 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca-bundle" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.831050 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-config" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.831310 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-grpc" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.831432 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-dockercfg-djr9f" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.835816 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-distributor-http" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.839807 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d"] Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.949850 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16d583e9-9ea0-4222-a38a-f8e1be33cdae-config\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.949912 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/16d583e9-9ea0-4222-a38a-f8e1be33cdae-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.949949 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16d583e9-9ea0-4222-a38a-f8e1be33cdae-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.953141 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps94k\" (UniqueName: \"kubernetes.io/projected/16d583e9-9ea0-4222-a38a-f8e1be33cdae-kube-api-access-ps94k\") pod 
\"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:12 crc kubenswrapper[4753]: I1205 17:32:12.953215 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/16d583e9-9ea0-4222-a38a-f8e1be33cdae-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.028167 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk"] Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.029520 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.035530 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-http" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.035895 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-querier-grpc" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.036363 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-loki-s3" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.042881 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk"] Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.054755 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16d583e9-9ea0-4222-a38a-f8e1be33cdae-config\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.054804 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/16d583e9-9ea0-4222-a38a-f8e1be33cdae-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.054836 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16d583e9-9ea0-4222-a38a-f8e1be33cdae-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.054934 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps94k\" (UniqueName: \"kubernetes.io/projected/16d583e9-9ea0-4222-a38a-f8e1be33cdae-kube-api-access-ps94k\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 
crc kubenswrapper[4753]: I1205 17:32:13.054967 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/16d583e9-9ea0-4222-a38a-f8e1be33cdae-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.057950 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/16d583e9-9ea0-4222-a38a-f8e1be33cdae-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.074555 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-grpc\" (UniqueName: \"kubernetes.io/secret/16d583e9-9ea0-4222-a38a-f8e1be33cdae-cloudkitty-lokistack-distributor-grpc\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.060888 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/16d583e9-9ea0-4222-a38a-f8e1be33cdae-config\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.078927 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-distributor-http\" (UniqueName: \"kubernetes.io/secret/16d583e9-9ea0-4222-a38a-f8e1be33cdae-cloudkitty-lokistack-distributor-http\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.108542 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps94k\" (UniqueName: \"kubernetes.io/projected/16d583e9-9ea0-4222-a38a-f8e1be33cdae-kube-api-access-ps94k\") pod \"cloudkitty-lokistack-distributor-664b687b54-jj67d\" (UID: \"16d583e9-9ea0-4222-a38a-f8e1be33cdae\") " pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.140240 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk"] Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.141778 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.151444 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-http" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.157676 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.160305 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk"] Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.160729 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-query-frontend-grpc" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.161707 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.161907 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/10df4aa4-d920-45d4-9592-72c32d59c312-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.162423 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10df4aa4-d920-45d4-9592-72c32d59c312-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.162452 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.162502 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9lbsr\" (UniqueName: \"kubernetes.io/projected/89ac2139-b38d-40b1-939d-b23748c819d0-kube-api-access-9lbsr\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.162521 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.162537 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8mp4\" (UniqueName: 
\"kubernetes.io/projected/10df4aa4-d920-45d4-9592-72c32d59c312-kube-api-access-s8mp4\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.162754 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89ac2139-b38d-40b1-939d-b23748c819d0-config\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.162772 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10df4aa4-d920-45d4-9592-72c32d59c312-config\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.162809 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/10df4aa4-d920-45d4-9592-72c32d59c312-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.162839 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.262352 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw"] Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.263918 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264563 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89ac2139-b38d-40b1-939d-b23748c819d0-config\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264607 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10df4aa4-d920-45d4-9592-72c32d59c312-config\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264632 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/10df4aa4-d920-45d4-9592-72c32d59c312-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264662 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264723 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264751 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/10df4aa4-d920-45d4-9592-72c32d59c312-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264818 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10df4aa4-d920-45d4-9592-72c32d59c312-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264835 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-lokistack-querier-grpc\") pod 
\"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264857 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9lbsr\" (UniqueName: \"kubernetes.io/projected/89ac2139-b38d-40b1-939d-b23748c819d0-kube-api-access-9lbsr\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264875 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.264893 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8mp4\" (UniqueName: \"kubernetes.io/projected/10df4aa4-d920-45d4-9592-72c32d59c312-kube-api-access-s8mp4\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.265894 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/89ac2139-b38d-40b1-939d-b23748c819d0-config\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.266171 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10df4aa4-d920-45d4-9592-72c32d59c312-config\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.266654 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/10df4aa4-d920-45d4-9592-72c32d59c312-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.269452 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.269617 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway-ca-bundle" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.269721 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-gateway" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.269825 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-http" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 
17:32:13.270660 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.279734 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"cloudkitty-lokistack-ca" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.279898 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-http\" (UniqueName: \"kubernetes.io/secret/10df4aa4-d920-45d4-9592-72c32d59c312-cloudkitty-lokistack-query-frontend-http\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.281052 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.284193 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-client-http" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.285630 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-grpc\" (UniqueName: \"kubernetes.io/secret/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-lokistack-querier-grpc\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.293538 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9lbsr\" (UniqueName: \"kubernetes.io/projected/89ac2139-b38d-40b1-939d-b23748c819d0-kube-api-access-9lbsr\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.296492 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-querier-http\" (UniqueName: \"kubernetes.io/secret/89ac2139-b38d-40b1-939d-b23748c819d0-cloudkitty-lokistack-querier-http\") pod \"cloudkitty-lokistack-querier-5467947bf7-7l6pk\" (UID: \"89ac2139-b38d-40b1-939d-b23748c819d0\") " pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.300762 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-query-frontend-grpc\" (UniqueName: \"kubernetes.io/secret/10df4aa4-d920-45d4-9592-72c32d59c312-cloudkitty-lokistack-query-frontend-grpc\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.304823 4753 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw"] Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.305789 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8mp4\" (UniqueName: \"kubernetes.io/projected/10df4aa4-d920-45d4-9592-72c32d59c312-kube-api-access-s8mp4\") pod \"cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk\" (UID: \"10df4aa4-d920-45d4-9592-72c32d59c312\") " pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.321191 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2"] Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.322378 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.325065 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-gateway-dockercfg-tr656" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.340198 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2"] Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368068 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368126 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-79szr\" (UniqueName: \"kubernetes.io/projected/389b7205-589e-4027-ae02-ba2287c7e0ed-kube-api-access-79szr\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368165 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368190 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368227 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/389b7205-589e-4027-ae02-ba2287c7e0ed-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " 
pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368248 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368267 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368286 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjnzw\" (UniqueName: \"kubernetes.io/projected/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-kube-api-access-xjnzw\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368453 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368530 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368573 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368618 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368735 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: 
\"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368779 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/389b7205-589e-4027-ae02-ba2287c7e0ed-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368851 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368902 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368955 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.368980 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.462744 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470368 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/389b7205-589e-4027-ae02-ba2287c7e0ed-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470423 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470452 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470480 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470503 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470522 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470545 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-79szr\" (UniqueName: \"kubernetes.io/projected/389b7205-589e-4027-ae02-ba2287c7e0ed-kube-api-access-79szr\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470563 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" 
Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470581 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470616 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/389b7205-589e-4027-ae02-ba2287c7e0ed-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470634 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470650 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470665 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjnzw\" (UniqueName: \"kubernetes.io/projected/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-kube-api-access-xjnzw\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470688 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470710 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470747 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470771 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.470811 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.471808 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.473377 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.473853 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.474067 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rbac\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-rbac\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.475641 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.475679 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.475866 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-lokistack-gateway-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.476554 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.477805 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.478316 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/389b7205-589e-4027-ae02-ba2287c7e0ed-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.479187 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.480262 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lokistack-gateway\" (UniqueName: \"kubernetes.io/configmap/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-lokistack-gateway\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.483928 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.484133 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tenants\" (UniqueName: \"kubernetes.io/secret/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-tenants\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.488588 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.494463 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-secret\" (UniqueName: \"kubernetes.io/secret/389b7205-589e-4027-ae02-ba2287c7e0ed-tls-secret\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.495003 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-79szr\" (UniqueName: \"kubernetes.io/projected/389b7205-589e-4027-ae02-ba2287c7e0ed-kube-api-access-79szr\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.495644 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjnzw\" (UniqueName: \"kubernetes.io/projected/8f82a8e6-b07e-4bf9-801e-04c1f96fe703-kube-api-access-xjnzw\") pod \"cloudkitty-lokistack-gateway-bc75944f-pqqjw\" (UID: \"8f82a8e6-b07e-4bf9-801e-04c1f96fe703\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.498804 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-gateway-client-http\" (UniqueName: \"kubernetes.io/secret/389b7205-589e-4027-ae02-ba2287c7e0ed-cloudkitty-lokistack-gateway-client-http\") pod \"cloudkitty-lokistack-gateway-bc75944f-8prh2\" (UID: \"389b7205-589e-4027-ae02-ba2287c7e0ed\") " pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.659160 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:13 crc kubenswrapper[4753]: I1205 17:32:13.675896 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.011975 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.013038 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.024887 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-http" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.024987 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-ingester-grpc" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.046713 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"] Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.111821 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.113141 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.115395 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-grpc" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.115558 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-compactor-http" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.127376 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"] Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.185059 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.186416 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.187074 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.187178 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnktq\" (UniqueName: \"kubernetes.io/projected/227cc7e4-602f-4c1e-afa7-0e106d3f505f-kube-api-access-vnktq\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.187205 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.187230 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.187255 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.187286 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.187303 4753 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.187342 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/227cc7e4-602f-4c1e-afa7-0e106d3f505f-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.188251 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-http" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.190046 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-lokistack-index-gateway-grpc" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.195382 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"] Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.288816 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.288858 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.288902 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5j64c\" (UniqueName: \"kubernetes.io/projected/93568770-efee-4906-b491-17d0664bfa8b-kube-api-access-5j64c\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.288926 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.288942 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.288985 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/227cc7e4-602f-4c1e-afa7-0e106d3f505f-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289010 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289031 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289059 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289080 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289096 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289113 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93568770-efee-4906-b491-17d0664bfa8b-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289165 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289194 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod 
\"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289216 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289235 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frb2p\" (UniqueName: \"kubernetes.io/projected/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-kube-api-access-frb2p\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289256 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnktq\" (UniqueName: \"kubernetes.io/projected/227cc7e4-602f-4c1e-afa7-0e106d3f505f-kube-api-access-vnktq\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289272 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289289 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289309 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289405 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.289453 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc 
kubenswrapper[4753]: I1205 17:32:14.289968 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.292482 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.293234 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.297131 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-http\" (UniqueName: \"kubernetes.io/secret/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-lokistack-ingester-http\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.301952 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/227cc7e4-602f-4c1e-afa7-0e106d3f505f-config\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.306887 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ingester-grpc\" (UniqueName: \"kubernetes.io/secret/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-lokistack-ingester-grpc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.309581 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/227cc7e4-602f-4c1e-afa7-0e106d3f505f-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.311425 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnktq\" (UniqueName: \"kubernetes.io/projected/227cc7e4-602f-4c1e-afa7-0e106d3f505f-kube-api-access-vnktq\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.319577 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " 
pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.329604 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"cloudkitty-lokistack-ingester-0\" (UID: \"227cc7e4-602f-4c1e-afa7-0e106d3f505f\") " pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.336687 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396687 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396732 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396762 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396785 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396802 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93568770-efee-4906-b491-17d0664bfa8b-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396838 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396863 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396885 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396904 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frb2p\" (UniqueName: \"kubernetes.io/projected/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-kube-api-access-frb2p\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396927 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396945 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396962 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.396990 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.397021 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5j64c\" (UniqueName: \"kubernetes.io/projected/93568770-efee-4906-b491-17d0664bfa8b-kube-api-access-5j64c\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.402089 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.405280 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-config\") pod \"cloudkitty-lokistack-compactor-0\" (UID: 
\"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.405394 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-grpc\" (UniqueName: \"kubernetes.io/secret/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-lokistack-compactor-grpc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.406290 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.406717 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-lokistack-ca-bundle\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.406833 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.407018 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93568770-efee-4906-b491-17d0664bfa8b-config\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.407172 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.411725 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-compactor-http\" (UniqueName: \"kubernetes.io/secret/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-cloudkitty-lokistack-compactor-http\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.413490 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-http\" (UniqueName: \"kubernetes.io/secret/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-lokistack-index-gateway-http\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.415425 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"cloudkitty-loki-s3\" (UniqueName: \"kubernetes.io/secret/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-loki-s3\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.416206 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cloudkitty-lokistack-index-gateway-grpc\" (UniqueName: \"kubernetes.io/secret/93568770-efee-4906-b491-17d0664bfa8b-cloudkitty-lokistack-index-gateway-grpc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.421672 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frb2p\" (UniqueName: \"kubernetes.io/projected/55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b-kube-api-access-frb2p\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.424849 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5j64c\" (UniqueName: \"kubernetes.io/projected/93568770-efee-4906-b491-17d0664bfa8b-kube-api-access-5j64c\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.427332 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"cloudkitty-lokistack-compactor-0\" (UID: \"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b\") " pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.430639 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"cloudkitty-lokistack-index-gateway-0\" (UID: \"93568770-efee-4906-b491-17d0664bfa8b\") " pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.441563 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:14 crc kubenswrapper[4753]: I1205 17:32:14.512203 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:18 crc kubenswrapper[4753]: I1205 17:32:18.120282 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/alertmanager-metric-storage-0"] Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.211662 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.212410 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v2sgt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-rchhw_openstack(30fe3d68-2fa2-416b-abac-b804b056a8af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.213828 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" podUID="30fe3d68-2fa2-416b-abac-b804b056a8af" Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.258507 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" 
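[Annotation] The ErrImagePull records above (17:32:19.211 through 17:32:19.258) are followed moments later by ImagePullBackOff rejections for the same dnsmasq-dns pods: after a pull fails, kubelet does not retry on every pod sync but gates retries behind a per-image exponential backoff. Below is a minimal, self-contained Go sketch of that gating. It is not kubelet's actual code (the real logic uses the flowcontrol.Backoff helper inside the kuberuntime image manager); the type and function names here are hypothetical, and it assumes kubelet's default 10s initial backoff doubling to a 5m cap.

// backoff_sketch.go - illustrative only; mirrors the ErrImagePull ->
// ImagePullBackOff transition visible in the log records above.
package main

import (
	"errors"
	"fmt"
	"time"
)

// The pull failure seen in the log: the CRI pull was canceled mid-copy.
var errPullFailed = errors.New("rpc error: code = Canceled desc = copying config: context canceled")

// pullBackoff tracks, per image, when the next pull may be attempted and the
// size of the current backoff window. Hypothetical type for illustration.
type pullBackoff struct {
	next    map[string]time.Time
	window  map[string]time.Duration
	initial time.Duration
	max     time.Duration
}

func newPullBackoff(initial, max time.Duration) *pullBackoff {
	return &pullBackoff{
		next:    map[string]time.Time{},
		window:  map[string]time.Duration{},
		initial: initial,
		max:     max,
	}
}

// tryPull either attempts a (simulated, always failing) pull or refuses
// locally with a back-off error, mirroring the two error strings in the log.
func (b *pullBackoff) tryPull(image string, now time.Time) error {
	// Inside the backoff window: reject without touching the runtime.
	if t, ok := b.next[image]; ok && now.Before(t) {
		return fmt.Errorf("ImagePullBackOff: Back-off pulling image %q", image)
	}
	// Attempt the pull; in this sketch it fails the way the log shows.
	err := errPullFailed
	// Grow the window: initial on first failure, then doubled up to the cap.
	w := b.window[image]
	if w == 0 {
		w = b.initial
	} else {
		w *= 2
		if w > b.max {
			w = b.max
		}
	}
	b.window[image] = w
	b.next[image] = now.Add(w)
	return fmt.Errorf("ErrImagePull: %w", err)
}

func main() {
	// 10s initial / 5m cap are kubelet's defaults for image pull backoff.
	b := newPullBackoff(10*time.Second, 5*time.Minute)
	img := "quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified"
	now := time.Now()
	for i := 0; i < 4; i++ {
		fmt.Println(b.tryPull(img, now))
		now = now.Add(5 * time.Second) // pod syncs arrive faster than the window grows
	}
}

The later "pull QPS exceeded" failures in this section (17:32:23.194, for ovn-controller-ovs-qhphp and kube-state-metrics-0) come from a separate guard: kubelet wraps the image service in a token-bucket rate limiter configured by --registry-qps/--registry-burst, so those pulls are rejected locally before the CRI runtime is contacted, and they surface as ErrImagePull all the same.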
Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.258746 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g8lr4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-vznhm_openstack(22d4d35e-da87-4f57-a984-0bd8a4b10a3f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.259952 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" podUID="22d4d35e-da87-4f57-a984-0bd8a4b10a3f" Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.269119 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.269275 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv 
--log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jfzzp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-t4hjs_openstack(70bc6ed3-d663-46b8-bd6a-2985905fceb8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.270543 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" podUID="70bc6ed3-d663-46b8-bd6a-2985905fceb8" Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.342473 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" podUID="22d4d35e-da87-4f57-a984-0bd8a4b10a3f" Dec 05 17:32:19 crc kubenswrapper[4753]: E1205 17:32:19.343243 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" podUID="30fe3d68-2fa2-416b-abac-b804b056a8af" Dec 05 17:32:20 crc kubenswrapper[4753]: W1205 17:32:20.501225 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf409c39c_6d5a_4950_bd92_2ab8a26ad831.slice/crio-71f793c4e1e7a28196bafcc417c5d4a363f3f6c95693a5b82f6af49405d5c339 WatchSource:0}: Error finding container 71f793c4e1e7a28196bafcc417c5d4a363f3f6c95693a5b82f6af49405d5c339: Status 404 returned error can't find the container with id 71f793c4e1e7a28196bafcc417c5d4a363f3f6c95693a5b82f6af49405d5c339 Dec 05 17:32:20 crc kubenswrapper[4753]: E1205 17:32:20.584095 4753 log.go:32] "PullImage from image service failed" err="rpc 
error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 17:32:20 crc kubenswrapper[4753]: E1205 17:32:20.584510 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8l6cx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-ndvfw_openstack(37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:32:20 crc kubenswrapper[4753]: E1205 17:32:20.585691 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" podUID="37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9" Dec 05 17:32:20 crc kubenswrapper[4753]: I1205 17:32:20.710979 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:32:20 crc kubenswrapper[4753]: I1205 17:32:20.827080 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc6ed3-d663-46b8-bd6a-2985905fceb8-config\") pod \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\" (UID: \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\") " Dec 05 17:32:20 crc kubenswrapper[4753]: I1205 17:32:20.827235 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfzzp\" (UniqueName: \"kubernetes.io/projected/70bc6ed3-d663-46b8-bd6a-2985905fceb8-kube-api-access-jfzzp\") pod \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\" (UID: \"70bc6ed3-d663-46b8-bd6a-2985905fceb8\") " Dec 05 17:32:20 crc kubenswrapper[4753]: I1205 17:32:20.830701 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70bc6ed3-d663-46b8-bd6a-2985905fceb8-config" (OuterVolumeSpecName: "config") pod "70bc6ed3-d663-46b8-bd6a-2985905fceb8" (UID: "70bc6ed3-d663-46b8-bd6a-2985905fceb8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:32:20 crc kubenswrapper[4753]: I1205 17:32:20.861355 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70bc6ed3-d663-46b8-bd6a-2985905fceb8-kube-api-access-jfzzp" (OuterVolumeSpecName: "kube-api-access-jfzzp") pod "70bc6ed3-d663-46b8-bd6a-2985905fceb8" (UID: "70bc6ed3-d663-46b8-bd6a-2985905fceb8"). InnerVolumeSpecName "kube-api-access-jfzzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:32:20 crc kubenswrapper[4753]: I1205 17:32:20.936523 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/70bc6ed3-d663-46b8-bd6a-2985905fceb8-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:32:20 crc kubenswrapper[4753]: I1205 17:32:20.936554 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfzzp\" (UniqueName: \"kubernetes.io/projected/70bc6ed3-d663-46b8-bd6a-2985905fceb8-kube-api-access-jfzzp\") on node \"crc\" DevicePath \"\"" Dec 05 17:32:21 crc kubenswrapper[4753]: I1205 17:32:21.359606 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"f409c39c-6d5a-4950-bd92-2ab8a26ad831","Type":"ContainerStarted","Data":"71f793c4e1e7a28196bafcc417c5d4a363f3f6c95693a5b82f6af49405d5c339"} Dec 05 17:32:21 crc kubenswrapper[4753]: I1205 17:32:21.363641 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" event={"ID":"70bc6ed3-d663-46b8-bd6a-2985905fceb8","Type":"ContainerDied","Data":"cacb634bd9738be217523958802e4da5573ce3a570491034a301997b5a53c13c"} Dec 05 17:32:21 crc kubenswrapper[4753]: I1205 17:32:21.363681 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-t4hjs" Dec 05 17:32:21 crc kubenswrapper[4753]: I1205 17:32:21.519079 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-t4hjs"] Dec 05 17:32:21 crc kubenswrapper[4753]: I1205 17:32:21.527441 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-t4hjs"] Dec 05 17:32:21 crc kubenswrapper[4753]: I1205 17:32:21.771314 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70bc6ed3-d663-46b8-bd6a-2985905fceb8" path="/var/lib/kubelet/pods/70bc6ed3-d663-46b8-bd6a-2985905fceb8/volumes" Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.040092 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.062902 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8m7cw"] Dec 05 17:32:22 crc kubenswrapper[4753]: W1205 17:32:22.070251 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8978936d_d71e_4840_9cc4_666746ebeecf.slice/crio-c2a1f4eeb89ba756f9be8372ea587d2278cb64603760f2df81e76ee58bc62999 WatchSource:0}: Error finding container c2a1f4eeb89ba756f9be8372ea587d2278cb64603760f2df81e76ee58bc62999: Status 404 returned error can't find the container with id c2a1f4eeb89ba756f9be8372ea587d2278cb64603760f2df81e76ee58bc62999 Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.081627 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.200365 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 17:32:22 crc kubenswrapper[4753]: W1205 17:32:22.266681 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f33836c_96c6_4da3_b2d6_e9c12631f2b4.slice/crio-dcf7e53f47846bdb9e25d58ae1723fbb91c148312d8c2e2667cf4a98e02210a4 WatchSource:0}: Error finding container dcf7e53f47846bdb9e25d58ae1723fbb91c148312d8c2e2667cf4a98e02210a4: Status 404 returned error can't find the container with id dcf7e53f47846bdb9e25d58ae1723fbb91c148312d8c2e2667cf4a98e02210a4 Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.391729 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8m7cw" event={"ID":"9f33836c-96c6-4da3-b2d6-e9c12631f2b4","Type":"ContainerStarted","Data":"dcf7e53f47846bdb9e25d58ae1723fbb91c148312d8c2e2667cf4a98e02210a4"} Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.401767 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"8978936d-d71e-4840-9cc4-666746ebeecf","Type":"ContainerStarted","Data":"c2a1f4eeb89ba756f9be8372ea587d2278cb64603760f2df81e76ee58bc62999"} Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.406874 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f5652c22-3bf2-454d-a4cf-fd0378f133b8","Type":"ContainerStarted","Data":"d2b7fe5d9b96c0331922682eb4b32643de6c7d8b758a699b13da54a8a9b025a1"} Dec 05 17:32:22 crc kubenswrapper[4753]: W1205 17:32:22.470255 4753 manager.go:1169] Failed to process watch event {EventType:0 
Dec 05 17:32:22 crc kubenswrapper[4753]: W1205 17:32:22.470255 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9df69769_e394_444f_b6e2_e788e989fe92.slice/crio-d65cc3cffe1b3d5b28ac37c015c322e6495c60bcaea784932204db5164712155 WatchSource:0}: Error finding container d65cc3cffe1b3d5b28ac37c015c322e6495c60bcaea784932204db5164712155: Status 404 returned error can't find the container with id d65cc3cffe1b3d5b28ac37c015c322e6495c60bcaea784932204db5164712155
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.635251 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.708823 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-index-gateway-0"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.718848 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-compactor-0"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.724973 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.731646 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.748732 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.761254 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.771571 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.782750 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.798316 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.811663 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-lokistack-ingester-0"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.818023 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Dec 05 17:32:22 crc kubenswrapper[4753]: I1205 17:32:22.923564 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-qhphp"]
Dec 05 17:32:23 crc kubenswrapper[4753]: W1205 17:32:23.094197 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod227cc7e4_602f_4c1e_afa7_0e106d3f505f.slice/crio-ffbd211680884da158d97cf1f187ba7f8d94d56f4394f79dd83c4e501052b4b7 WatchSource:0}: Error finding container ffbd211680884da158d97cf1f187ba7f8d94d56f4394f79dd83c4e501052b4b7: Status 404 returned error can't find the container with id ffbd211680884da158d97cf1f187ba7f8d94d56f4394f79dd83c4e501052b4b7
Dec 05 17:32:23 crc kubenswrapper[4753]: W1205 17:32:23.149534 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc44e1a93_e233_46a2_b18a_e6c8c396a394.slice/crio-bba8b5942e6a1ef3de56a177c62900be2fca913372f13f8bc77d891d60ca1f17 WatchSource:0}: Error finding container bba8b5942e6a1ef3de56a177c62900be2fca913372f13f8bc77d891d60ca1f17: Status 404 returned error can't find the container with id bba8b5942e6a1ef3de56a177c62900be2fca913372f13f8bc77d891d60ca1f17
Dec 05 17:32:23 crc kubenswrapper[4753]: E1205 17:32:23.194030 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:ovsdb-server-init,Image:quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified,Command:[/usr/local/bin/container-scripts/init-ovsdb-server.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n656h55bh5bbh55ch99h596hch9dh688h89h77hf4h679h689h675hb6h84h664h564h5c8h674h564h575h5b6h66dh55bh5b5h5fch57h57h645hfdq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-ovs,ReadOnly:false,MountPath:/etc/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log,ReadOnly:false,MountPath:/var/log/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-lib,ReadOnly:false,MountPath:/var/lib/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-np796,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-ovs-qhphp_openstack(284db7f3-ca89-447a-90eb-487d43e49f7d): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 17:32:23 crc kubenswrapper[4753]: E1205 17:32:23.194213 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q2vzc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(28078f95-1316-46c1-9dda-9912561aa4e4): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 17:32:23 crc kubenswrapper[4753]: E1205 17:32:23.195239 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack/ovn-controller-ovs-qhphp" podUID="284db7f3-ca89-447a-90eb-487d43e49f7d"
Dec 05 17:32:23 crc kubenswrapper[4753]: E1205 17:32:23.195285 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack/kube-state-metrics-0" podUID="28078f95-1316-46c1-9dda-9912561aa4e4"
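Both ErrImagePull failures above report "pull QPS exceeded". That error is client-side: the kubelet rate-limits image pulls with a token bucket configured by registryPullQPS and registryBurst (upstream defaults are 5 QPS with a burst of 10), and with this many pods landing at once the bucket empties, so pulls past the burst fail immediately and are handed to the retry/backoff machinery. A rough token-bucket sketch of the idea (the values are the upstream defaults; this is an illustration, not the kubelet's code):

    import time

    class PullLimiter:
        """Token bucket in the spirit of registryPullQPS/registryBurst."""
        def __init__(self, qps: float = 5.0, burst: int = 10):
            self.rate, self.capacity = qps, float(burst)
            self.tokens, self.last = float(burst), time.monotonic()

        def try_acquire(self) -> bool:
            now = time.monotonic()
            self.tokens = min(self.capacity, self.tokens + (now - self.last) * self.rate)
            self.last = now
            if self.tokens >= 1.0:
                self.tokens -= 1.0
                return True
            return False                   # surfaced as "pull QPS exceeded"

    limiter = PullLimiter()
    for n in range(15):                    # a burst of simultaneous pod pulls
        if not limiter.try_acquire():
            print(f"pull {n}: ErrImagePull: pull QPS exceeded")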
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.317042 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-config" (OuterVolumeSpecName: "config") pod "37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9" (UID: "37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.317061 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.320773 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-kube-api-access-8l6cx" (OuterVolumeSpecName: "kube-api-access-8l6cx") pod "37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9" (UID: "37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9"). InnerVolumeSpecName "kube-api-access-8l6cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.418636 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.418934 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8l6cx\" (UniqueName: \"kubernetes.io/projected/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9-kube-api-access-8l6cx\") on node \"crc\" DevicePath \"\"" Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.421745 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b","Type":"ContainerStarted","Data":"cf2b514930d685c2d436cdffd281cf29f35a3769f990e667ec972e66132b84e8"} Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.422956 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" event={"ID":"8f82a8e6-b07e-4bf9-801e-04c1f96fe703","Type":"ContainerStarted","Data":"ffb71573945fdf22912b4933a233f5c2bfaf1e70a1c50cdf6b038ac13f4fdc3f"} Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.424198 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" event={"ID":"389b7205-589e-4027-ae02-ba2287c7e0ed","Type":"ContainerStarted","Data":"429fe539719887325e6ad727d6d16ed5ece06121b79dfc243692954bf3ee4a88"} Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.425552 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"93568770-efee-4906-b491-17d0664bfa8b","Type":"ContainerStarted","Data":"adfda5ac0fe4a6dd6afe8e2f31b9285f7e4367ec305f1fa0bd2b28a3c5ff33a8"} Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.426877 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qhphp" event={"ID":"284db7f3-ca89-447a-90eb-487d43e49f7d","Type":"ContainerStarted","Data":"7a54dde13773a66f614671647df92b3dc74de035e4c339e29a23a922d9d48340"} Dec 05 17:32:23 crc kubenswrapper[4753]: E1205 17:32:23.428263 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off 
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.421745 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b","Type":"ContainerStarted","Data":"cf2b514930d685c2d436cdffd281cf29f35a3769f990e667ec972e66132b84e8"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.422956 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" event={"ID":"8f82a8e6-b07e-4bf9-801e-04c1f96fe703","Type":"ContainerStarted","Data":"ffb71573945fdf22912b4933a233f5c2bfaf1e70a1c50cdf6b038ac13f4fdc3f"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.424198 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" event={"ID":"389b7205-589e-4027-ae02-ba2287c7e0ed","Type":"ContainerStarted","Data":"429fe539719887325e6ad727d6d16ed5ece06121b79dfc243692954bf3ee4a88"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.425552 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"93568770-efee-4906-b491-17d0664bfa8b","Type":"ContainerStarted","Data":"adfda5ac0fe4a6dd6afe8e2f31b9285f7e4367ec305f1fa0bd2b28a3c5ff33a8"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.426877 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qhphp" event={"ID":"284db7f3-ca89-447a-90eb-487d43e49f7d","Type":"ContainerStarted","Data":"7a54dde13773a66f614671647df92b3dc74de035e4c339e29a23a922d9d48340"}
Dec 05 17:32:23 crc kubenswrapper[4753]: E1205 17:32:23.428263 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified\\\"\"" pod="openstack/ovn-controller-ovs-qhphp" podUID="284db7f3-ca89-447a-90eb-487d43e49f7d"
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.432686 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" event={"ID":"89ac2139-b38d-40b1-939d-b23748c819d0","Type":"ContainerStarted","Data":"da80f3fed4e533118e6e19f42f1c6180f6ea76920c466308267e2bdbe3ff0e20"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.435718 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469","Type":"ContainerStarted","Data":"4a796c7c1a8eaf1de6f60b94d6dd3eac6f40ff74d6ee99bd7e81366842044355"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.437319 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c44e1a93-e233-46a2-b18a-e6c8c396a394","Type":"ContainerStarted","Data":"bba8b5942e6a1ef3de56a177c62900be2fca913372f13f8bc77d891d60ca1f17"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.439230 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw" event={"ID":"37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9","Type":"ContainerDied","Data":"696d72b151fcfb1eff7e8e55085b9b348d324038f4c1038d1a1f7c212637c023"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.439515 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ndvfw"
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.441356 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9df69769-e394-444f-b6e2-e788e989fe92","Type":"ContainerStarted","Data":"d65cc3cffe1b3d5b28ac37c015c322e6495c60bcaea784932204db5164712155"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.443751 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" event={"ID":"10df4aa4-d920-45d4-9592-72c32d59c312","Type":"ContainerStarted","Data":"8f896b34fe31e8dc7219adf7d4eb62e3fc1d529d0baa7c6f2377d74c730d0cea"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.449409 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4c8d8a7a-38bd-49d9-8f25-5495c32462bc","Type":"ContainerStarted","Data":"bbb1578e975beb672da9a00878107e7539f952eb6479b9513c4562e23b1733d6"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.451711 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"227cc7e4-602f-4c1e-afa7-0e106d3f505f","Type":"ContainerStarted","Data":"ffbd211680884da158d97cf1f187ba7f8d94d56f4394f79dd83c4e501052b4b7"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.452878 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"09e6b220-0a70-4359-93f4-4450b2e458c8","Type":"ContainerStarted","Data":"9766fb9f1ec1ed1ec8e0be0b6b36076da165bbaaaff6fb680fd435f920bde8e2"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.454798 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28078f95-1316-46c1-9dda-9912561aa4e4","Type":"ContainerStarted","Data":"128409f6754423fcd428f2bce32cd77e047aa65cdd30460e786ce9f76528ddd2"}
Dec 05 17:32:23 crc kubenswrapper[4753]: E1205 17:32:23.456763 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="28078f95-1316-46c1-9dda-9912561aa4e4"
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.458716 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"36088f6e-7c3c-4fee-918e-e1ee91bf6b33","Type":"ContainerStarted","Data":"771faf157830e014422c7e771db96a6f041aee98cdf73b6a9d7d87a972150a7c"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.460282 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" event={"ID":"16d583e9-9ea0-4222-a38a-f8e1be33cdae","Type":"ContainerStarted","Data":"9372cd0dbc898aae60b6bbe3562efeecea42b99fb2468a61c7b7cba265fd8400"}
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.547883 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ndvfw"]
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.565780 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ndvfw"]
Dec 05 17:32:23 crc kubenswrapper[4753]: I1205 17:32:23.734512 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9" path="/var/lib/kubelet/pods/37bad1d4-a7ff-49ef-bfa0-70d3c939b5d9/volumes"
Dec 05 17:32:24 crc kubenswrapper[4753]: E1205 17:32:24.500378 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="28078f95-1316-46c1-9dda-9912561aa4e4"
Dec 05 17:32:24 crc kubenswrapper[4753]: E1205 17:32:24.500402 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdb-server-init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-base:current-podified\\\"\"" pod="openstack/ovn-controller-ovs-qhphp" podUID="284db7f3-ca89-447a-90eb-487d43e49f7d"
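After the initial "pull QPS exceeded" failures, the errors switch to ImagePullBackOff: the kubelet remembers the failed pull per image and skips pod syncs until an exponential backoff expires (the upstream kubelet's image backoff starts around 10s and doubles to a 5m cap), so the repeated messages at 17:32:24 are sync attempts being skipped while the window is still open, not new pull attempts. A small sketch of that schedule (assumed upstream defaults, not kubelet code):

    def backoff_delays(initial: float = 10.0, factor: float = 2.0, cap: float = 300.0):
        delay = initial
        while True:
            yield min(delay, cap)
            delay *= factor

    g = backoff_delays()
    print([next(g) for _ in range(7)])     # 10, 20, 40, 80, 160, 300, 300 (seconds)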
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:32:28 crc kubenswrapper[4753]: I1205 17:32:28.978934 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 17:32:28 crc kubenswrapper[4753]: I1205 17:32:28.979722 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:32:28 crc kubenswrapper[4753]: I1205 17:32:28.979786 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" gracePeriod=600 Dec 05 17:32:30 crc kubenswrapper[4753]: I1205 17:32:30.542605 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" exitCode=0 Dec 05 17:32:30 crc kubenswrapper[4753]: I1205 17:32:30.542657 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd"} Dec 05 17:32:30 crc kubenswrapper[4753]: I1205 17:32:30.543204 4753 scope.go:117] "RemoveContainer" containerID="e5f48dac911921ce787e6e9fea9709c824d57dffe736123659ddb77fb75ed48a" Dec 05 17:32:30 crc kubenswrapper[4753]: I1205 17:32:30.723237 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:32:34 crc kubenswrapper[4753]: I1205 17:32:34.584511 4753 generic.go:334] "Generic (PLEG): container finished" podID="f409c39c-6d5a-4950-bd92-2ab8a26ad831" containerID="0884ded3176bea034f2454815ad69ed511d3394112180ad9f4088407cae1e8cf" exitCode=0 Dec 05 17:32:34 crc kubenswrapper[4753]: I1205 17:32:34.584630 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"f409c39c-6d5a-4950-bd92-2ab8a26ad831","Type":"ContainerDied","Data":"0884ded3176bea034f2454815ad69ed511d3394112180ad9f4088407cae1e8cf"} Dec 05 17:32:34 crc kubenswrapper[4753]: I1205 17:32:34.588692 4753 generic.go:334] "Generic (PLEG): container finished" podID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerID="3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d" exitCode=0 Dec 05 17:32:34 crc kubenswrapper[4753]: I1205 17:32:34.588717 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c44e1a93-e233-46a2-b18a-e6c8c396a394","Type":"ContainerDied","Data":"3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d"} Dec 05 17:32:35 crc kubenswrapper[4753]: E1205 17:32:35.864793 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7" Dec 05 17:32:35 crc kubenswrapper[4753]: E1205 17:32:35.865583 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-querier,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7,Command:[],Args:[-target=querier -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml -config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:gossip-ring,HostPort:0,ContainerPort:7946,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:AWS_ACCESS_KEY_ID,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_id,Optional:nil,},},},EnvVar{Name:AWS_ACCESS_KEY_SECRET,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_secret,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-querier-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-loki-s3,ReadOnly:false,MountPath:/etc/storage/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-querier-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9lbsr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 
},Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-querier-5467947bf7-7l6pk_openstack(89ac2139-b38d-40b1-939d-b23748c819d0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:32:35 crc kubenswrapper[4753]: E1205 17:32:35.866915 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-querier\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" podUID="89ac2139-b38d-40b1-939d-b23748c819d0" Dec 05 17:32:35 crc kubenswrapper[4753]: E1205 17:32:35.872327 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7" Dec 05 17:32:35 crc kubenswrapper[4753]: E1205 17:32:35.872661 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-compactor,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7,Command:[],Args:[-target=compactor -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml 
-config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:AWS_ACCESS_KEY_ID,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_id,Optional:nil,},},},EnvVar{Name:AWS_ACCESS_KEY_SECRET,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_secret,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:storage,ReadOnly:false,MountPath:/tmp/loki,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-compactor-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-loki-s3,ReadOnly:false,MountPath:/etc/storage/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-compactor-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-frb2p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-compactor-0_openstack(55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:32:35 crc 
kubenswrapper[4753]: E1205 17:32:35.874261 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-compactor\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-compactor-0" podUID="55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b" Dec 05 17:32:35 crc kubenswrapper[4753]: E1205 17:32:35.916745 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7" Dec 05 17:32:35 crc kubenswrapper[4753]: E1205 17:32:35.917338 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-ingester,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7,Command:[],Args:[-target=ingester -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml -config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:gossip-ring,HostPort:0,ContainerPort:7946,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:AWS_ACCESS_KEY_ID,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_id,Optional:nil,},},},EnvVar{Name:AWS_ACCESS_KEY_SECRET,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:cloudkitty-loki-s3,},Key:access_key_secret,Optional:nil,},},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:storage,ReadOnly:false,MountPath:/tmp/loki,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:wal,ReadOnly:false,MountPath:/tmp/wal,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ingester-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-loki-s3,ReadOnly:false,MountPath:/etc/storage/secrets,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ingester-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vnktq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 
},Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-ingester-0_openstack(227cc7e4-602f-4c1e-afa7-0e106d3f505f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:32:35 crc kubenswrapper[4753]: E1205 17:32:35.918660 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-ingester\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="227cc7e4-602f-4c1e-afa7-0e106d3f505f" Dec 05 17:32:36 crc kubenswrapper[4753]: E1205 17:32:36.607758 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-compactor\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7\\\"\"" pod="openstack/cloudkitty-lokistack-compactor-0" podUID="55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b" Dec 05 17:32:36 crc kubenswrapper[4753]: E1205 17:32:36.608715 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-ingester\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7\\\"\"" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="227cc7e4-602f-4c1e-afa7-0e106d3f505f" Dec 05 17:32:36 crc kubenswrapper[4753]: E1205 17:32:36.608888 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-querier\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7\\\"\"" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" podUID="89ac2139-b38d-40b1-939d-b23748c819d0" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.195086 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 
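The Loki failures differ from the QPS ones: "rpc error: code = Canceled ... context canceled" means the CRI pull RPC's context was cancelled on the kubelet side mid-copy (the attempt's deadline expired or a superseding pod sync gave up on it), not that the registry refused. All of the lokistack components are pulling the same digest, and each cancelled copy then parks its pod in ImagePullBackOff. A toy deadline-cancellation sketch with the same shape:

    import asyncio

    async def pull_image(image: str) -> str:
        for _layer in range(10):           # pretend each layer copy takes 1s
            await asyncio.sleep(1)
        return f"pulled {image}"

    async def main():
        try:
            await asyncio.wait_for(pull_image("logging-loki-rhel9@sha256:..."), timeout=2)
        except asyncio.TimeoutError:       # deadline hit: the in-flight copy is
            print("ErrImagePull: context canceled")   # aborted mid-transfer

    asyncio.run(main())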
Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.195086 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7"
Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.196166 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-query-frontend,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7,Command:[],Args:[-target=query-frontend -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml -config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-query-frontend-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-query-frontend-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s8mp4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk_openstack(10df4aa4-d920-45d4-9592-72c32d59c312): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError"
Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.198257 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-query-frontend\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" podUID="10df4aa4-d920-45d4-9592-72c32d59c312"
Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.200525 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
podUID="9df69769-e394-444f-b6e2-e788e989fe92" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.225140 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.225946 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:loki-distributor,Image:registry.redhat.io/openshift-logging/logging-loki-rhel9@sha256:06b83c3cbf0c5db4dd9812e046ca14189d18cf7b3c7f2f2c37aa705cc5f5deb7,Command:[],Args:[-target=distributor -config.file=/etc/loki/config/config.yaml -runtime-config.file=/etc/loki/config/runtime-config.yaml -config.expand-env=true],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:3100,Protocol:TCP,HostIP:,},ContainerPort{Name:grpclb,HostPort:0,ContainerPort:9095,Protocol:TCP,HostIP:,},ContainerPort{Name:gossip-ring,HostPort:0,ContainerPort:7946,Protocol:TCP,HostIP:,},ContainerPort{Name:healthchecks,HostPort:0,ContainerPort:3101,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:false,MountPath:/etc/loki/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-distributor-http,ReadOnly:false,MountPath:/var/run/tls/http/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-distributor-grpc,ReadOnly:false,MountPath:/var/run/tls/grpc/server,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:cloudkitty-lokistack-ca-bundle,ReadOnly:false,MountPath:/var/run/ca,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ps94k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/loki/api/v1/status/buildinfo,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:0,TimeoutSeconds:2,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:10,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/ready,Port:{0 3101 },Host:,Scheme:HTTPS,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-lokistack-distributor-664b687b54-jj67d_openstack(16d583e9-9ea0-4222-a38a-f8e1be33cdae): ErrImagePull: 
rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.227249 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"loki-distributor\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" podUID="16d583e9-9ea0-4222-a38a-f8e1be33cdae" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.325922 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.326071 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v5fzj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(4c8d8a7a-38bd-49d9-8f25-5495c32462bc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.328315 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="4c8d8a7a-38bd-49d9-8f25-5495c32462bc" Dec 05 
17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.456013 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.456447 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovsdbserver-nb,Image:quay.io/podified-antelope-centos9/openstack-ovn-nb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5f5h8bhb4hbch647h674h77h595h647h5f5hf5h68fh586h655h68ch6bh689h5dfh55ch64ch676h8bh8fhdh654h669h4h646h666h689h584h7fq,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-nb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8mks7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof 
ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-nb-0_openstack(f8c13e8e-fec9-49e2-a2b0-5ca0473d2469): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:32:40 crc kubenswrapper[4753]: I1205 17:32:40.650024 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.650274 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.651767 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="9df69769-e394-444f-b6e2-e788e989fe92" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.652087 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="4c8d8a7a-38bd-49d9-8f25-5495c32462bc" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.904763 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.904979 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovn-controller,Image:quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified,Command:[ovn-controller --pidfile unix:/run/openvswitch/db.sock --certificate=/etc/pki/tls/certs/ovndb.crt --private-key=/etc/pki/tls/private/ovndb.key 
--ca-cert=/etc/pki/tls/certs/ovndbca.crt],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n656h55bh5bbh55ch99h596hch9dh688h89h77hf4h679h689h675hb6h84h664h564h5c8h674h564h575h5b6h66dh55bh5b5h5fch57h57h645hfdq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:var-run,ReadOnly:false,MountPath:/var/run/openvswitch,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-run-ovn,ReadOnly:false,MountPath:/var/run/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:var-log-ovn,ReadOnly:false,MountPath:/var/log/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovn-controller-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cltgk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_liveness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/ovn_controller_readiness.sh],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:30,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/share/ovn/scripts/ovn-ctl stop_controller],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[NET_ADMIN SYS_ADMIN SYS_NICE],Drop:[],},Privileged:*true,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-controller-8m7cw_openstack(9f33836c-96c6-4da3-b2d6-e9c12631f2b4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:32:40 crc kubenswrapper[4753]: E1205 17:32:40.906410 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"ovn-controller\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovn-controller-8m7cw" podUID="9f33836c-96c6-4da3-b2d6-e9c12631f2b4" Dec 05 17:32:41 crc kubenswrapper[4753]: E1205 17:32:41.117438 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified" Dec 05 17:32:41 crc kubenswrapper[4753]: E1205 17:32:41.117658 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovsdbserver-sb,Image:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n675h5dbh76h55bh64fhc6h9dh5cch5cbhc8h578h79hf5h9bh5bhc8h647h65h5fh664h67fh59h589h59h588h55h67fh5d8h686h698h58dh79q,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bvjlc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof 
ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-sb-0_openstack(09e6b220-0a70-4359-93f4-4450b2e458c8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:32:41 crc kubenswrapper[4753]: E1205 17:32:41.659660 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovn-controller\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-controller:current-podified\\\"\"" pod="openstack/ovn-controller-8m7cw" podUID="9f33836c-96c6-4da3-b2d6-e9c12631f2b4" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.738546 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"8978936d-d71e-4840-9cc4-666746ebeecf","Type":"ContainerStarted","Data":"6c5d29c5a1d80668d0380cff4865d01449ecf726302a454839c7bba591adc4df"} Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.738979 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.742358 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" event={"ID":"16d583e9-9ea0-4222-a38a-f8e1be33cdae","Type":"ContainerStarted","Data":"0982fa0a66571f0598edd74de8bc21cf9f25883ed25c9e121af6bb348d2bdfc7"} Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.742657 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.744881 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" event={"ID":"10df4aa4-d920-45d4-9592-72c32d59c312","Type":"ContainerStarted","Data":"f3b6bce2f391b3c2c965e70f45fb110486c1309ced897a9b8d191ef803bbe68a"} Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.745082 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.749136 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" 
event={"ID":"8f82a8e6-b07e-4bf9-801e-04c1f96fe703","Type":"ContainerStarted","Data":"d2f0a16549ed58724787942608f8066e43c4088c4436c7402a8dd55b1b2a7ff8"} Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.749375 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.751920 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-index-gateway-0" event={"ID":"93568770-efee-4906-b491-17d0664bfa8b","Type":"ContainerStarted","Data":"2a1f531cefe541d7de2a6876dc1c043c3982eab7542321530269de10fe9a7119"} Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.752054 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.762369 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=27.734090189 podStartE2EDuration="46.762349387s" podCreationTimestamp="2025-12-05 17:31:59 +0000 UTC" firstStartedPulling="2025-12-05 17:32:22.074046895 +0000 UTC m=+1680.577153901" lastFinishedPulling="2025-12-05 17:32:41.102306083 +0000 UTC m=+1699.605413099" observedRunningTime="2025-12-05 17:32:45.757830409 +0000 UTC m=+1704.260937435" watchObservedRunningTime="2025-12-05 17:32:45.762349387 +0000 UTC m=+1704.265456413" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.764726 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.795816 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-pqqjw" podStartSLOduration=14.831506714 podStartE2EDuration="32.795794715s" podCreationTimestamp="2025-12-05 17:32:13 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.137437996 +0000 UTC m=+1681.640545002" lastFinishedPulling="2025-12-05 17:32:41.101725957 +0000 UTC m=+1699.604833003" observedRunningTime="2025-12-05 17:32:45.781895501 +0000 UTC m=+1704.285002517" watchObservedRunningTime="2025-12-05 17:32:45.795794715 +0000 UTC m=+1704.298901741" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.800653 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" podStartSLOduration=-9223372004.054142 podStartE2EDuration="32.800633162s" podCreationTimestamp="2025-12-05 17:32:13 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.103042306 +0000 UTC m=+1681.606149322" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:32:45.798711617 +0000 UTC m=+1704.301818623" watchObservedRunningTime="2025-12-05 17:32:45.800633162 +0000 UTC m=+1704.303740188" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.820037 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" podStartSLOduration=-9223372003.03476 podStartE2EDuration="33.820015441s" podCreationTimestamp="2025-12-05 17:32:12 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.083398762 +0000 UTC m=+1681.586505768" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:32:45.811554981 +0000 UTC m=+1704.314661997" watchObservedRunningTime="2025-12-05 17:32:45.820015441 +0000 UTC 
m=+1704.323122447" Dec 05 17:32:45 crc kubenswrapper[4753]: I1205 17:32:45.835938 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-index-gateway-0" podStartSLOduration=14.927687842 podStartE2EDuration="32.835923681s" podCreationTimestamp="2025-12-05 17:32:13 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.193841048 +0000 UTC m=+1681.696948044" lastFinishedPulling="2025-12-05 17:32:41.102076877 +0000 UTC m=+1699.605183883" observedRunningTime="2025-12-05 17:32:45.831568688 +0000 UTC m=+1704.334675704" watchObservedRunningTime="2025-12-05 17:32:45.835923681 +0000 UTC m=+1704.339030687" Dec 05 17:32:46 crc kubenswrapper[4753]: I1205 17:32:46.774679 4753 generic.go:334] "Generic (PLEG): container finished" podID="284db7f3-ca89-447a-90eb-487d43e49f7d" containerID="85da4239626257459734af089fcb3dca66eae124d85ef186fb7dbf0029e47b87" exitCode=0 Dec 05 17:32:46 crc kubenswrapper[4753]: I1205 17:32:46.774788 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qhphp" event={"ID":"284db7f3-ca89-447a-90eb-487d43e49f7d","Type":"ContainerDied","Data":"85da4239626257459734af089fcb3dca66eae124d85ef186fb7dbf0029e47b87"} Dec 05 17:32:48 crc kubenswrapper[4753]: I1205 17:32:48.796353 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"f409c39c-6d5a-4950-bd92-2ab8a26ad831","Type":"ContainerStarted","Data":"12aabcd0b9c2c97fda68cdaa60c8d92cea878b1cd3a98e5eed9d4f8d6cebbba4"} Dec 05 17:32:49 crc kubenswrapper[4753]: I1205 17:32:49.512390 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.842418 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" event={"ID":"389b7205-589e-4027-ae02-ba2287c7e0ed","Type":"ContainerStarted","Data":"c23a990c912cd61532863e4d3846065590f5372df84d3050086019be7d7ab22c"} Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.843437 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.850880 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c44e1a93-e233-46a2-b18a-e6c8c396a394","Type":"ContainerStarted","Data":"049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477"} Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.850881 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" podUID="389b7205-589e-4027-ae02-ba2287c7e0ed" containerName="gateway" probeResult="failure" output="Get \"https://10.217.0.127:8081/ready\": dial tcp 10.217.0.127:8081: connect: connection refused" Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.856971 4753 generic.go:334] "Generic (PLEG): container finished" podID="30fe3d68-2fa2-416b-abac-b804b056a8af" containerID="6a26f46620df0cc5d80e3e2f9b0b4e3062309fce0cb87bc87d6c51ecd3d77f7d" exitCode=0 Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.857020 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" event={"ID":"30fe3d68-2fa2-416b-abac-b804b056a8af","Type":"ContainerDied","Data":"6a26f46620df0cc5d80e3e2f9b0b4e3062309fce0cb87bc87d6c51ecd3d77f7d"} Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.872511 4753 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" podStartSLOduration=16.695493194 podStartE2EDuration="37.87249416s" podCreationTimestamp="2025-12-05 17:32:13 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.137544419 +0000 UTC m=+1681.640651425" lastFinishedPulling="2025-12-05 17:32:44.314545385 +0000 UTC m=+1702.817652391" observedRunningTime="2025-12-05 17:32:50.868731703 +0000 UTC m=+1709.371838719" watchObservedRunningTime="2025-12-05 17:32:50.87249416 +0000 UTC m=+1709.375601166" Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.897288 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rchhw"] Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.943663 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-9krvs"] Dec 05 17:32:50 crc kubenswrapper[4753]: I1205 17:32:50.945282 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.006032 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-9krvs"] Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.095450 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-9krvs\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.095529 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2lvv\" (UniqueName: \"kubernetes.io/projected/a91f1699-f2d1-4335-8f7f-cb44dae997da-kube-api-access-w2lvv\") pod \"dnsmasq-dns-7cb5889db5-9krvs\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.095686 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-config\") pod \"dnsmasq-dns-7cb5889db5-9krvs\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.199355 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-config\") pod \"dnsmasq-dns-7cb5889db5-9krvs\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.199682 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-9krvs\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.199751 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2lvv\" (UniqueName: \"kubernetes.io/projected/a91f1699-f2d1-4335-8f7f-cb44dae997da-kube-api-access-w2lvv\") pod \"dnsmasq-dns-7cb5889db5-9krvs\" (UID: 
\"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.200556 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-config\") pod \"dnsmasq-dns-7cb5889db5-9krvs\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.200649 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-dns-svc\") pod \"dnsmasq-dns-7cb5889db5-9krvs\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.217041 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w2lvv\" (UniqueName: \"kubernetes.io/projected/a91f1699-f2d1-4335-8f7f-cb44dae997da-kube-api-access-w2lvv\") pod \"dnsmasq-dns-7cb5889db5-9krvs\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.282058 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.873650 4753 generic.go:334] "Generic (PLEG): container finished" podID="22d4d35e-da87-4f57-a984-0bd8a4b10a3f" containerID="7c44e1eab9372fb992da2ce929672610038b4adfc4c7a3b5a9ad6c81dd3f3c16" exitCode=0 Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.873674 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" event={"ID":"22d4d35e-da87-4f57-a984-0bd8a4b10a3f","Type":"ContainerDied","Data":"7c44e1eab9372fb992da2ce929672610038b4adfc4c7a3b5a9ad6c81dd3f3c16"} Dec 05 17:32:51 crc kubenswrapper[4753]: I1205 17:32:51.899203 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-gateway-bc75944f-8prh2" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.071776 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.123619 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.135519 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.135712 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.136123 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-z2zwj" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.136220 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.157502 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.241471 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gqzw5\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-kube-api-access-gqzw5\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.241595 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f4707e97-4f70-42d5-959e-1d2c8a9629e5-lock\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.241859 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.241993 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-07f78576-169d-425b-8f33-7454553bedcc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-07f78576-169d-425b-8f33-7454553bedcc\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.242022 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f4707e97-4f70-42d5-959e-1d2c8a9629e5-cache\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.343743 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f4707e97-4f70-42d5-959e-1d2c8a9629e5-lock\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.343862 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.343905 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-07f78576-169d-425b-8f33-7454553bedcc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-07f78576-169d-425b-8f33-7454553bedcc\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.343929 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f4707e97-4f70-42d5-959e-1d2c8a9629e5-cache\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.344001 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gqzw5\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-kube-api-access-gqzw5\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: E1205 17:32:52.344068 4753 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:32:52 crc kubenswrapper[4753]: E1205 17:32:52.344089 4753 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:32:52 crc kubenswrapper[4753]: E1205 17:32:52.344383 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift podName:f4707e97-4f70-42d5-959e-1d2c8a9629e5 nodeName:}" failed. No retries permitted until 2025-12-05 17:32:52.844123647 +0000 UTC m=+1711.347230653 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift") pod "swift-storage-0" (UID: "f4707e97-4f70-42d5-959e-1d2c8a9629e5") : configmap "swift-ring-files" not found Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.345011 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/f4707e97-4f70-42d5-959e-1d2c8a9629e5-lock\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.345076 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/f4707e97-4f70-42d5-959e-1d2c8a9629e5-cache\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.355406 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
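Note on the etc-swift MountVolume.SetUp failures above: kubelet retries a failed volume mount with an exponentially increasing delay, visible in the durationBeforeRetry values in this log (500ms here, then 1s, 2s, and 4s on the later retries, until the swift-ring-files configmap appears). A minimal Go sketch of that doubling schedule follows; the initial delay is taken from the log, while the factor and the cap are assumptions about kubelet's defaults, not values shown here.

package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the retry delay after each failed mount attempt,
// starting at the 500ms seen in the log and saturating at an assumed cap.
func nextBackoff(current time.Duration) time.Duration {
	const (
		initial  = 500 * time.Millisecond      // first durationBeforeRetry above
		factor   = 2                           // assumed doubling factor
		maxDelay = 2*time.Minute + 2*time.Second // assumed upper bound
	)
	if current < initial {
		return initial
	}
	if next := current * factor; next < maxDelay {
		return next
	}
	return maxDelay
}

func main() {
	d := time.Duration(0)
	for i := 0; i < 5; i++ {
		d = nextBackoff(d)
		fmt.Println("retry in", d) // 500ms, 1s, 2s, 4s, 8s
	}
}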
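The podStartSLOduration values of roughly -9223372000s reported above for the lokistack query-frontend and distributor pods (and again below for the querier, ingester, and compactor) are a time.Duration saturation artifact: those entries print lastFinishedPulling as the zero time.Time (0001-01-01 00:00:00 +0000 UTC), and subtracting a 2025 timestamp from it overflows the int64 nanosecond count, so Sub clamps to the minimum Duration of about -9.22e9 seconds. A short, runnable Go illustration; the concrete timestamp is made up for the example.

package main

import (
	"fmt"
	"time"
)

func main() {
	var lastFinishedPulling time.Time // zero value, as printed in the entries above
	observed := time.Date(2025, 12, 5, 17, 32, 45, 0, time.UTC)
	// Sub clamps to the minimum time.Duration on overflow (~ -292 years),
	// which surfaces in the log as podStartSLOduration ≈ -9223372036s
	// plus the pod's actual end-to-end duration.
	d := lastFinishedPulling.Sub(observed)
	fmt.Println(d.Seconds()) // -9.223372036854776e+09
}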
Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.355459 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-07f78576-169d-425b-8f33-7454553bedcc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-07f78576-169d-425b-8f33-7454553bedcc\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/a4c11556a1da0e3d6891688e03046c304f8f1745337ab3c3e492821151a7e4d7/globalmount\"" pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.380766 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gqzw5\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-kube-api-access-gqzw5\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.437779 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-9krvs"] Dec 05 17:32:52 crc kubenswrapper[4753]: W1205 17:32:52.501309 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda91f1699_f2d1_4335_8f7f_cb44dae997da.slice/crio-bfc43738d27a13f208969ceb53e28f9c5a590d39ae9e1167cf1968d6457c5d5d WatchSource:0}: Error finding container bfc43738d27a13f208969ceb53e28f9c5a590d39ae9e1167cf1968d6457c5d5d: Status 404 returned error can't find the container with id bfc43738d27a13f208969ceb53e28f9c5a590d39ae9e1167cf1968d6457c5d5d Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.570896 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-07f78576-169d-425b-8f33-7454553bedcc\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-07f78576-169d-425b-8f33-7454553bedcc\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.860041 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:52 crc kubenswrapper[4753]: E1205 17:32:52.860283 4753 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:32:52 crc kubenswrapper[4753]: E1205 17:32:52.860301 4753 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:32:52 crc kubenswrapper[4753]: E1205 17:32:52.860353 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift podName:f4707e97-4f70-42d5-959e-1d2c8a9629e5 nodeName:}" failed. No retries permitted until 2025-12-05 17:32:53.860337316 +0000 UTC m=+1712.363444322 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift") pod "swift-storage-0" (UID: "f4707e97-4f70-42d5-959e-1d2c8a9629e5") : configmap "swift-ring-files" not found Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.884293 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" event={"ID":"a91f1699-f2d1-4335-8f7f-cb44dae997da","Type":"ContainerStarted","Data":"bfc43738d27a13f208969ceb53e28f9c5a590d39ae9e1167cf1968d6457c5d5d"} Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.885854 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" event={"ID":"89ac2139-b38d-40b1-939d-b23748c819d0","Type":"ContainerStarted","Data":"40bf851f219265809becdb54ed04258385ac665274d2ca49366a8e6d3b3b8ac5"} Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.886030 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.890176 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-compactor-0" event={"ID":"55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b","Type":"ContainerStarted","Data":"61452d80777b27b677f36cf634edd02a42a70c05dfe89bd50c054780f023a6c3"} Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.890323 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-compactor-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.891351 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-lokistack-ingester-0" event={"ID":"227cc7e4-602f-4c1e-afa7-0e106d3f505f","Type":"ContainerStarted","Data":"79694404a1ea4098b1728d413f0dab3473a5c7d05e247b4e4a4c26fb31882f9a"} Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.891522 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-lokistack-ingester-0" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.910231 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk" podStartSLOduration=-9223371995.944561 podStartE2EDuration="40.910215049s" podCreationTimestamp="2025-12-05 17:32:12 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.131804307 +0000 UTC m=+1681.634911313" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:32:52.90532512 +0000 UTC m=+1711.408432126" watchObservedRunningTime="2025-12-05 17:32:52.910215049 +0000 UTC m=+1711.413322055" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.937339 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-ingester-0" podStartSLOduration=-9223371995.91746 podStartE2EDuration="40.937315816s" podCreationTimestamp="2025-12-05 17:32:12 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.101265516 +0000 UTC m=+1681.604372532" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:32:52.935051082 +0000 UTC m=+1711.438158088" watchObservedRunningTime="2025-12-05 17:32:52.937315816 +0000 UTC m=+1711.440422822" Dec 05 17:32:52 crc kubenswrapper[4753]: I1205 17:32:52.956576 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-lokistack-compactor-0" podStartSLOduration=-9223371996.898249 
podStartE2EDuration="39.95652739s" podCreationTimestamp="2025-12-05 17:32:13 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.128454813 +0000 UTC m=+1681.631561819" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:32:52.950773577 +0000 UTC m=+1711.453880573" watchObservedRunningTime="2025-12-05 17:32:52.95652739 +0000 UTC m=+1711.459634406" Dec 05 17:32:52 crc kubenswrapper[4753]: E1205 17:32:52.996046 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-nb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-nb-0" podUID="f8c13e8e-fec9-49e2-a2b0-5ca0473d2469" Dec 05 17:32:53 crc kubenswrapper[4753]: E1205 17:32:53.000092 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-sb-0" podUID="09e6b220-0a70-4359-93f4-4450b2e458c8" Dec 05 17:32:53 crc kubenswrapper[4753]: E1205 17:32:53.201808 4753 log.go:32] "CreateContainer in sandbox from runtime service failed" err=< Dec 05 17:32:53 crc kubenswrapper[4753]: rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/22d4d35e-da87-4f57-a984-0bd8a4b10a3f/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 17:32:53 crc kubenswrapper[4753]: > podSandboxID="34244e18449a7b1dfec03911f275735a658e5e78ae586ab4f95c0c8f46d7e8b3" Dec 05 17:32:53 crc kubenswrapper[4753]: E1205 17:32:53.202057 4753 kuberuntime_manager.go:1274] "Unhandled Error" err=< Dec 05 17:32:53 crc kubenswrapper[4753]: container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g8lr4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-vznhm_openstack(22d4d35e-da87-4f57-a984-0bd8a4b10a3f): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/22d4d35e-da87-4f57-a984-0bd8a4b10a3f/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory Dec 05 17:32:53 crc kubenswrapper[4753]: > logger="UnhandledError" Dec 05 17:32:53 crc kubenswrapper[4753]: E1205 17:32:53.203921 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/22d4d35e-da87-4f57-a984-0bd8a4b10a3f/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" podUID="22d4d35e-da87-4f57-a984-0bd8a4b10a3f" Dec 05 17:32:53 crc kubenswrapper[4753]: E1205 17:32:53.880877 4753 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:32:53 crc kubenswrapper[4753]: E1205 17:32:53.881187 4753 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.880940 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:53 crc kubenswrapper[4753]: E1205 17:32:53.881241 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift podName:f4707e97-4f70-42d5-959e-1d2c8a9629e5 nodeName:}" failed. No retries permitted until 2025-12-05 17:32:55.881224658 +0000 UTC m=+1714.384331734 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift") pod "swift-storage-0" (UID: "f4707e97-4f70-42d5-959e-1d2c8a9629e5") : configmap "swift-ring-files" not found Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.903675 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" event={"ID":"30fe3d68-2fa2-416b-abac-b804b056a8af","Type":"ContainerStarted","Data":"a96547d7875d188e09ac520613645bd3c27f92d9ef61c44c795730b8f1b4223e"} Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.903816 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" podUID="30fe3d68-2fa2-416b-abac-b804b056a8af" containerName="dnsmasq-dns" containerID="cri-o://a96547d7875d188e09ac520613645bd3c27f92d9ef61c44c795730b8f1b4223e" gracePeriod=10 Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.904123 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.906312 4753 generic.go:334] "Generic (PLEG): container finished" podID="a91f1699-f2d1-4335-8f7f-cb44dae997da" containerID="9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7" exitCode=0 Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.906373 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" event={"ID":"a91f1699-f2d1-4335-8f7f-cb44dae997da","Type":"ContainerDied","Data":"9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7"} Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.912260 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qhphp" event={"ID":"284db7f3-ca89-447a-90eb-487d43e49f7d","Type":"ContainerStarted","Data":"ce88d43f784deefe6f928aa3a01226e064e5d425920707c2887428054f8008db"} Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.912292 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-qhphp" event={"ID":"284db7f3-ca89-447a-90eb-487d43e49f7d","Type":"ContainerStarted","Data":"abeda7fce3bd90024ac8115547d27b4e8a167b6825bd38c60fbf63f646b23274"} Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.912851 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-qhphp" Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.912894 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-qhphp" Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.914559 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28078f95-1316-46c1-9dda-9912561aa4e4","Type":"ContainerStarted","Data":"2a0d5dd008371247a1e90fee309ca09991d490d0b2928249372bd98c5bb0f9fa"} Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.914974 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.916356 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9df69769-e394-444f-b6e2-e788e989fe92","Type":"ContainerStarted","Data":"c6726eb3c418185d28eb3de97023de39a7f6bba759f6f884dcb2dd6a9c3d8b12"} Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.918048 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovsdbserver-nb-0" event={"ID":"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469","Type":"ContainerStarted","Data":"bb9eb13e7801bb313bbafd8f611217f819ec442596ef486fce7a12fe42ad781a"} Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.925818 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" podStartSLOduration=11.193958336 podStartE2EDuration="59.925801231s" podCreationTimestamp="2025-12-05 17:31:54 +0000 UTC" firstStartedPulling="2025-12-05 17:31:55.574105477 +0000 UTC m=+1654.077212483" lastFinishedPulling="2025-12-05 17:32:44.305948372 +0000 UTC m=+1702.809055378" observedRunningTime="2025-12-05 17:32:53.92045765 +0000 UTC m=+1712.423564656" watchObservedRunningTime="2025-12-05 17:32:53.925801231 +0000 UTC m=+1712.428908237" Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.939031 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"09e6b220-0a70-4359-93f4-4450b2e458c8","Type":"ContainerStarted","Data":"d6f9485219048e4908874d9956d80484a0a723b82c24d2270d34a96fec82be4a"} Dec 05 17:32:53 crc kubenswrapper[4753]: I1205 17:32:53.955361 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-qhphp" podStartSLOduration=28.842129932 podStartE2EDuration="49.955345258s" podCreationTimestamp="2025-12-05 17:32:04 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.19392281 +0000 UTC m=+1681.697029816" lastFinishedPulling="2025-12-05 17:32:44.307138126 +0000 UTC m=+1702.810245142" observedRunningTime="2025-12-05 17:32:53.95261358 +0000 UTC m=+1712.455720606" watchObservedRunningTime="2025-12-05 17:32:53.955345258 +0000 UTC m=+1712.458452264" Dec 05 17:32:54 crc kubenswrapper[4753]: I1205 17:32:54.010183 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=25.295260901 podStartE2EDuration="54.01016767s" podCreationTimestamp="2025-12-05 17:32:00 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.194055884 +0000 UTC m=+1681.697162880" lastFinishedPulling="2025-12-05 17:32:51.908962643 +0000 UTC m=+1710.412069649" observedRunningTime="2025-12-05 17:32:54.009784929 +0000 UTC m=+1712.512891935" watchObservedRunningTime="2025-12-05 17:32:54.01016767 +0000 UTC m=+1712.513274676" Dec 05 17:32:54 crc kubenswrapper[4753]: I1205 17:32:54.950444 4753 generic.go:334] "Generic (PLEG): container finished" podID="30fe3d68-2fa2-416b-abac-b804b056a8af" containerID="a96547d7875d188e09ac520613645bd3c27f92d9ef61c44c795730b8f1b4223e" exitCode=0 Dec 05 17:32:54 crc kubenswrapper[4753]: I1205 17:32:54.950536 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" event={"ID":"30fe3d68-2fa2-416b-abac-b804b056a8af","Type":"ContainerDied","Data":"a96547d7875d188e09ac520613645bd3c27f92d9ef61c44c795730b8f1b4223e"} Dec 05 17:32:54 crc kubenswrapper[4753]: I1205 17:32:54.952402 4753 generic.go:334] "Generic (PLEG): container finished" podID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerID="d2b7fe5d9b96c0331922682eb4b32643de6c7d8b758a699b13da54a8a9b025a1" exitCode=0 Dec 05 17:32:54 crc kubenswrapper[4753]: I1205 17:32:54.952501 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f5652c22-3bf2-454d-a4cf-fd0378f133b8","Type":"ContainerDied","Data":"d2b7fe5d9b96c0331922682eb4b32643de6c7d8b758a699b13da54a8a9b025a1"} Dec 05 17:32:55 crc kubenswrapper[4753]: I1205 17:32:55.723170 4753 
scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:32:55 crc kubenswrapper[4753]: E1205 17:32:55.723597 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:32:55 crc kubenswrapper[4753]: I1205 17:32:55.920493 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:55 crc kubenswrapper[4753]: E1205 17:32:55.920709 4753 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:32:55 crc kubenswrapper[4753]: E1205 17:32:55.921053 4753 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:32:55 crc kubenswrapper[4753]: E1205 17:32:55.921116 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift podName:f4707e97-4f70-42d5-959e-1d2c8a9629e5 nodeName:}" failed. No retries permitted until 2025-12-05 17:32:59.921096579 +0000 UTC m=+1718.424203585 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift") pod "swift-storage-0" (UID: "f4707e97-4f70-42d5-959e-1d2c8a9629e5") : configmap "swift-ring-files" not found Dec 05 17:32:55 crc kubenswrapper[4753]: I1205 17:32:55.962069 4753 generic.go:334] "Generic (PLEG): container finished" podID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerID="771faf157830e014422c7e771db96a6f041aee98cdf73b6a9d7d87a972150a7c" exitCode=0 Dec 05 17:32:55 crc kubenswrapper[4753]: I1205 17:32:55.962980 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"36088f6e-7c3c-4fee-918e-e1ee91bf6b33","Type":"ContainerDied","Data":"771faf157830e014422c7e771db96a6f041aee98cdf73b6a9d7d87a972150a7c"} Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.001478 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-z4g8x"] Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.003270 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.009514 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.009745 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.009896 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.046220 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-z4g8x"] Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.129979 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-scripts\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.130090 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbj5v\" (UniqueName: \"kubernetes.io/projected/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-kube-api-access-cbj5v\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.130240 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-swiftconf\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.130343 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-ring-data-devices\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.130375 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-combined-ca-bundle\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.130428 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-etc-swift\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.130480 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-dispersionconf\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 
17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.232828 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-swiftconf\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.232928 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-ring-data-devices\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.232952 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-combined-ca-bundle\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.232983 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-etc-swift\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.233019 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-dispersionconf\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.233098 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-scripts\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.233178 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbj5v\" (UniqueName: \"kubernetes.io/projected/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-kube-api-access-cbj5v\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.234604 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-etc-swift\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.235909 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-ring-data-devices\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.236531 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-scripts\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.237722 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-dispersionconf\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.239598 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-swiftconf\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.243943 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-combined-ca-bundle\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.279447 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbj5v\" (UniqueName: \"kubernetes.io/projected/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-kube-api-access-cbj5v\") pod \"swift-ring-rebalance-z4g8x\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") " pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.438379 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-z4g8x" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.456707 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.549932 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-dns-svc\") pod \"30fe3d68-2fa2-416b-abac-b804b056a8af\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.550036 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-config\") pod \"30fe3d68-2fa2-416b-abac-b804b056a8af\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.550203 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2sgt\" (UniqueName: \"kubernetes.io/projected/30fe3d68-2fa2-416b-abac-b804b056a8af-kube-api-access-v2sgt\") pod \"30fe3d68-2fa2-416b-abac-b804b056a8af\" (UID: \"30fe3d68-2fa2-416b-abac-b804b056a8af\") " Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.560725 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30fe3d68-2fa2-416b-abac-b804b056a8af-kube-api-access-v2sgt" (OuterVolumeSpecName: "kube-api-access-v2sgt") pod "30fe3d68-2fa2-416b-abac-b804b056a8af" (UID: "30fe3d68-2fa2-416b-abac-b804b056a8af"). 
InnerVolumeSpecName "kube-api-access-v2sgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.609071 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-config" (OuterVolumeSpecName: "config") pod "30fe3d68-2fa2-416b-abac-b804b056a8af" (UID: "30fe3d68-2fa2-416b-abac-b804b056a8af"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.653762 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2sgt\" (UniqueName: \"kubernetes.io/projected/30fe3d68-2fa2-416b-abac-b804b056a8af-kube-api-access-v2sgt\") on node \"crc\" DevicePath \"\"" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.653799 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.688957 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "30fe3d68-2fa2-416b-abac-b804b056a8af" (UID: "30fe3d68-2fa2-416b-abac-b804b056a8af"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.756802 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/30fe3d68-2fa2-416b-abac-b804b056a8af-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.995328 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8m7cw" event={"ID":"9f33836c-96c6-4da3-b2d6-e9c12631f2b4","Type":"ContainerStarted","Data":"e9f31038028e0e0fad6fa2b9f67491d8b342f2054b195abd8fd0232def33ba7e"} Dec 05 17:32:56 crc kubenswrapper[4753]: I1205 17:32:56.996372 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-8m7cw" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.006485 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" event={"ID":"30fe3d68-2fa2-416b-abac-b804b056a8af","Type":"ContainerDied","Data":"6c45f8bbde4ee5e19bf5eff6b7eebcf0610dcb40a5ad058f9094e0fb81bf2967"} Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.006539 4753 scope.go:117] "RemoveContainer" containerID="a96547d7875d188e09ac520613645bd3c27f92d9ef61c44c795730b8f1b4223e" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.006538 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-rchhw" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.024630 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f5652c22-3bf2-454d-a4cf-fd0378f133b8","Type":"ContainerStarted","Data":"3c247339d27c9ec5ba0733cc0a9a7a3d287053286490b1cea64952a8c4b81b32"} Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.024973 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.032051 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-8m7cw" podStartSLOduration=19.1018 podStartE2EDuration="53.032027081s" podCreationTimestamp="2025-12-05 17:32:04 +0000 UTC" firstStartedPulling="2025-12-05 17:32:22.26880924 +0000 UTC m=+1680.771916246" lastFinishedPulling="2025-12-05 17:32:56.199036321 +0000 UTC m=+1714.702143327" observedRunningTime="2025-12-05 17:32:57.024644732 +0000 UTC m=+1715.527751738" watchObservedRunningTime="2025-12-05 17:32:57.032027081 +0000 UTC m=+1715.535134087" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.037374 4753 scope.go:117] "RemoveContainer" containerID="6a26f46620df0cc5d80e3e2f9b0b4e3062309fce0cb87bc87d6c51ecd3d77f7d" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.039237 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4c8d8a7a-38bd-49d9-8f25-5495c32462bc","Type":"ContainerStarted","Data":"6019f09463e2f71df322a9effcf7e469c544557429b43a63171f2525c8d669e4"} Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.045558 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"09e6b220-0a70-4359-93f4-4450b2e458c8","Type":"ContainerStarted","Data":"8f98b3e7d07964cc7e9fd1f6d2913d18678b11cc74129dd7aba0fd55129fdc29"} Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.050870 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-z4g8x"] Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.055007 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" event={"ID":"a91f1699-f2d1-4335-8f7f-cb44dae997da","Type":"ContainerStarted","Data":"2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924"} Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.055549 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.072806 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/alertmanager-metric-storage-0" event={"ID":"f409c39c-6d5a-4950-bd92-2ab8a26ad831","Type":"ContainerStarted","Data":"a334be658f49f357587f3d2e7d838b71acda586de9f37af3d3097371cfe3e389"} Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.073514 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/alertmanager-metric-storage-0" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.075356 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/alertmanager-metric-storage-0" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.075838 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"36088f6e-7c3c-4fee-918e-e1ee91bf6b33","Type":"ContainerStarted","Data":"10668d64fe2c712c2486402a9956161db31594987a12e3d235797dcb37d29bf6"} Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.076217 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.084928 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"f8c13e8e-fec9-49e2-a2b0-5ca0473d2469","Type":"ContainerStarted","Data":"6ab4dbc1bd749122352b6d27ab567ec07b862356893262078ed93187911e202a"} Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.089171 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c44e1a93-e233-46a2-b18a-e6c8c396a394","Type":"ContainerStarted","Data":"7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb"} Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.091043 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=44.414885619 podStartE2EDuration="1m2.091027592s" podCreationTimestamp="2025-12-05 17:31:55 +0000 UTC" firstStartedPulling="2025-12-05 17:32:03.062396973 +0000 UTC m=+1661.565503989" lastFinishedPulling="2025-12-05 17:32:20.738538946 +0000 UTC m=+1679.241645962" observedRunningTime="2025-12-05 17:32:57.081933474 +0000 UTC m=+1715.585040480" watchObservedRunningTime="2025-12-05 17:32:57.091027592 +0000 UTC m=+1715.594134598" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.119113 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rchhw"] Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.126523 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-rchhw"] Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.167907 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=45.376012911 podStartE2EDuration="1m3.167892899s" podCreationTimestamp="2025-12-05 17:31:54 +0000 UTC" firstStartedPulling="2025-12-05 17:32:03.063533505 +0000 UTC m=+1661.566640541" lastFinishedPulling="2025-12-05 17:32:20.855413523 +0000 UTC m=+1679.358520529" observedRunningTime="2025-12-05 17:32:57.163658229 +0000 UTC m=+1715.666765235" watchObservedRunningTime="2025-12-05 17:32:57.167892899 +0000 UTC m=+1715.670999905" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.214209 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" podStartSLOduration=7.21418454 podStartE2EDuration="7.21418454s" podCreationTimestamp="2025-12-05 17:32:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:32:57.207861931 +0000 UTC m=+1715.710968937" watchObservedRunningTime="2025-12-05 17:32:57.21418454 +0000 UTC m=+1715.717291546" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.303389 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/alertmanager-metric-storage-0" podStartSLOduration=32.508795379 podStartE2EDuration="56.303373705s" podCreationTimestamp="2025-12-05 17:32:01 +0000 UTC" firstStartedPulling="2025-12-05 17:32:20.531426694 +0000 UTC m=+1679.034533720" lastFinishedPulling="2025-12-05 17:32:44.32600504 +0000 UTC m=+1702.829112046" 
observedRunningTime="2025-12-05 17:32:57.297100468 +0000 UTC m=+1715.800207474" watchObservedRunningTime="2025-12-05 17:32:57.303373705 +0000 UTC m=+1715.806480711" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.345306 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=21.04063129 podStartE2EDuration="54.345285782s" podCreationTimestamp="2025-12-05 17:32:03 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.083739891 +0000 UTC m=+1681.586846897" lastFinishedPulling="2025-12-05 17:32:56.388394373 +0000 UTC m=+1714.891501389" observedRunningTime="2025-12-05 17:32:57.328230609 +0000 UTC m=+1715.831337635" watchObservedRunningTime="2025-12-05 17:32:57.345285782 +0000 UTC m=+1715.848392788" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.352360 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=15.437258239 podStartE2EDuration="49.352339242s" podCreationTimestamp="2025-12-05 17:32:08 +0000 UTC" firstStartedPulling="2025-12-05 17:32:22.477471517 +0000 UTC m=+1680.980578523" lastFinishedPulling="2025-12-05 17:32:56.39255252 +0000 UTC m=+1714.895659526" observedRunningTime="2025-12-05 17:32:57.349839721 +0000 UTC m=+1715.852946747" watchObservedRunningTime="2025-12-05 17:32:57.352339242 +0000 UTC m=+1715.855446248" Dec 05 17:32:57 crc kubenswrapper[4753]: I1205 17:32:57.731508 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30fe3d68-2fa2-416b-abac-b804b056a8af" path="/var/lib/kubelet/pods/30fe3d68-2fa2-416b-abac-b804b056a8af/volumes" Dec 05 17:32:58 crc kubenswrapper[4753]: I1205 17:32:58.101984 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z4g8x" event={"ID":"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2","Type":"ContainerStarted","Data":"78f01af2815ef193d82323e8d496f76789ec4044f4fadc5091494043aea57f67"} Dec 05 17:32:58 crc kubenswrapper[4753]: I1205 17:32:58.124858 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 17:32:59 crc kubenswrapper[4753]: I1205 17:32:59.087814 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 05 17:32:59 crc kubenswrapper[4753]: I1205 17:32:59.108808 4753 generic.go:334] "Generic (PLEG): container finished" podID="9df69769-e394-444f-b6e2-e788e989fe92" containerID="c6726eb3c418185d28eb3de97023de39a7f6bba759f6f884dcb2dd6a9c3d8b12" exitCode=0 Dec 05 17:32:59 crc kubenswrapper[4753]: I1205 17:32:59.109628 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9df69769-e394-444f-b6e2-e788e989fe92","Type":"ContainerDied","Data":"c6726eb3c418185d28eb3de97023de39a7f6bba759f6f884dcb2dd6a9c3d8b12"} Dec 05 17:32:59 crc kubenswrapper[4753]: I1205 17:32:59.154164 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 05 17:32:59 crc kubenswrapper[4753]: I1205 17:32:59.940823 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:32:59 crc kubenswrapper[4753]: E1205 17:32:59.940992 4753 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not 
found Dec 05 17:32:59 crc kubenswrapper[4753]: E1205 17:32:59.941012 4753 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:32:59 crc kubenswrapper[4753]: E1205 17:32:59.941061 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift podName:f4707e97-4f70-42d5-959e-1d2c8a9629e5 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:07.941046897 +0000 UTC m=+1726.444153903 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift") pod "swift-storage-0" (UID: "f4707e97-4f70-42d5-959e-1d2c8a9629e5") : configmap "swift-ring-files" not found Dec 05 17:33:00 crc kubenswrapper[4753]: I1205 17:33:00.087028 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 05 17:33:00 crc kubenswrapper[4753]: I1205 17:33:00.124929 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 17:33:00 crc kubenswrapper[4753]: I1205 17:33:00.129156 4753 generic.go:334] "Generic (PLEG): container finished" podID="4c8d8a7a-38bd-49d9-8f25-5495c32462bc" containerID="6019f09463e2f71df322a9effcf7e469c544557429b43a63171f2525c8d669e4" exitCode=0 Dec 05 17:33:00 crc kubenswrapper[4753]: I1205 17:33:00.131558 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4c8d8a7a-38bd-49d9-8f25-5495c32462bc","Type":"ContainerDied","Data":"6019f09463e2f71df322a9effcf7e469c544557429b43a63171f2525c8d669e4"} Dec 05 17:33:00 crc kubenswrapper[4753]: I1205 17:33:00.939775 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.182201 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.187565 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.238508 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.286388 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.360574 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-vznhm"] Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.459216 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-l2m96"] Dec 05 17:33:01 crc kubenswrapper[4753]: E1205 17:33:01.459738 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30fe3d68-2fa2-416b-abac-b804b056a8af" containerName="init" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.459757 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="30fe3d68-2fa2-416b-abac-b804b056a8af" containerName="init" Dec 05 17:33:01 crc kubenswrapper[4753]: E1205 17:33:01.459801 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30fe3d68-2fa2-416b-abac-b804b056a8af" containerName="dnsmasq-dns" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 
17:33:01.459810 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="30fe3d68-2fa2-416b-abac-b804b056a8af" containerName="dnsmasq-dns" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.460056 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="30fe3d68-2fa2-416b-abac-b804b056a8af" containerName="dnsmasq-dns" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.461512 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.464555 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.483098 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-l2m96"] Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.568036 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-sp59x"] Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.569287 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.576454 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.586800 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-config\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.587135 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.587199 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6pj2\" (UniqueName: \"kubernetes.io/projected/091fce24-9317-4efb-8ee3-ebb38f200312-kube-api-access-n6pj2\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.587265 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-dns-svc\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.593914 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-sp59x"] Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.693417 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-dns-svc\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc 
kubenswrapper[4753]: I1205 17:33:01.693493 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85fb40be-ac2e-404f-912b-2831ae6eb795-combined-ca-bundle\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.693517 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/85fb40be-ac2e-404f-912b-2831ae6eb795-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.693555 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-config\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.693623 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/85fb40be-ac2e-404f-912b-2831ae6eb795-ovs-rundir\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.693658 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/85fb40be-ac2e-404f-912b-2831ae6eb795-ovn-rundir\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.693682 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9dnd\" (UniqueName: \"kubernetes.io/projected/85fb40be-ac2e-404f-912b-2831ae6eb795-kube-api-access-v9dnd\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.693705 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.693723 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6pj2\" (UniqueName: \"kubernetes.io/projected/091fce24-9317-4efb-8ee3-ebb38f200312-kube-api-access-n6pj2\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.693749 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85fb40be-ac2e-404f-912b-2831ae6eb795-config\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " 
pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.694640 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-dns-svc\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.694705 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-ovsdbserver-nb\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.695292 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-config\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.712109 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-l2m96"] Dec 05 17:33:01 crc kubenswrapper[4753]: E1205 17:33:01.717849 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-n6pj2], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-57d65f699f-l2m96" podUID="091fce24-9317-4efb-8ee3-ebb38f200312" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.748165 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6pj2\" (UniqueName: \"kubernetes.io/projected/091fce24-9317-4efb-8ee3-ebb38f200312-kube-api-access-n6pj2\") pod \"dnsmasq-dns-57d65f699f-l2m96\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.755588 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.757103 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.787766 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.787953 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-wft5g" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.788059 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.788179 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800398 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2c3f794-ac1f-4115-bf82-a43f3a487332-config\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800429 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2c3f794-ac1f-4115-bf82-a43f3a487332-scripts\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800460 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2c3f794-ac1f-4115-bf82-a43f3a487332-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800490 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85fb40be-ac2e-404f-912b-2831ae6eb795-combined-ca-bundle\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800509 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/85fb40be-ac2e-404f-912b-2831ae6eb795-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800540 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdt2h\" (UniqueName: \"kubernetes.io/projected/a2c3f794-ac1f-4115-bf82-a43f3a487332-kube-api-access-jdt2h\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800592 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2c3f794-ac1f-4115-bf82-a43f3a487332-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800621 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2c3f794-ac1f-4115-bf82-a43f3a487332-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800644 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/85fb40be-ac2e-404f-912b-2831ae6eb795-ovs-rundir\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800678 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/85fb40be-ac2e-404f-912b-2831ae6eb795-ovn-rundir\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800700 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9dnd\" (UniqueName: \"kubernetes.io/projected/85fb40be-ac2e-404f-912b-2831ae6eb795-kube-api-access-v9dnd\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800719 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2c3f794-ac1f-4115-bf82-a43f3a487332-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.800745 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85fb40be-ac2e-404f-912b-2831ae6eb795-config\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.801449 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85fb40be-ac2e-404f-912b-2831ae6eb795-config\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.806237 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85fb40be-ac2e-404f-912b-2831ae6eb795-combined-ca-bundle\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.809591 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/85fb40be-ac2e-404f-912b-2831ae6eb795-ovs-rundir\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.810009 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/85fb40be-ac2e-404f-912b-2831ae6eb795-ovn-rundir\") pod \"ovn-controller-metrics-sp59x\" 
(UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.812813 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/85fb40be-ac2e-404f-912b-2831ae6eb795-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.827195 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.835047 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-562p5"] Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.836561 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.855289 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.880691 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9dnd\" (UniqueName: \"kubernetes.io/projected/85fb40be-ac2e-404f-912b-2831ae6eb795-kube-api-access-v9dnd\") pod \"ovn-controller-metrics-sp59x\" (UID: \"85fb40be-ac2e-404f-912b-2831ae6eb795\") " pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.910948 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2c3f794-ac1f-4115-bf82-a43f3a487332-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.911040 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.911123 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2c3f794-ac1f-4115-bf82-a43f3a487332-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.911411 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-config\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.911631 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/a2c3f794-ac1f-4115-bf82-a43f3a487332-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.911997 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2c3f794-ac1f-4115-bf82-a43f3a487332-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.912052 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.912139 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2c3f794-ac1f-4115-bf82-a43f3a487332-config\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.912216 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2c3f794-ac1f-4115-bf82-a43f3a487332-scripts\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.912249 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2c3f794-ac1f-4115-bf82-a43f3a487332-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.912314 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdt2h\" (UniqueName: \"kubernetes.io/projected/a2c3f794-ac1f-4115-bf82-a43f3a487332-kube-api-access-jdt2h\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.912397 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m8s6\" (UniqueName: \"kubernetes.io/projected/e1e33c8a-695c-42cb-9e55-3a5346413faf-kube-api-access-8m8s6\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.912431 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.913138 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a2c3f794-ac1f-4115-bf82-a43f3a487332-config\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.914806 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a2c3f794-ac1f-4115-bf82-a43f3a487332-scripts\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc 
kubenswrapper[4753]: I1205 17:33:01.916897 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2c3f794-ac1f-4115-bf82-a43f3a487332-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.917511 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2c3f794-ac1f-4115-bf82-a43f3a487332-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.919061 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2c3f794-ac1f-4115-bf82-a43f3a487332-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.919525 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-562p5"] Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.919673 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-sp59x" Dec 05 17:33:01 crc kubenswrapper[4753]: I1205 17:33:01.958891 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdt2h\" (UniqueName: \"kubernetes.io/projected/a2c3f794-ac1f-4115-bf82-a43f3a487332-kube-api-access-jdt2h\") pod \"ovn-northd-0\" (UID: \"a2c3f794-ac1f-4115-bf82-a43f3a487332\") " pod="openstack/ovn-northd-0" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.005973 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.014407 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-config\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.014638 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.014962 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m8s6\" (UniqueName: \"kubernetes.io/projected/e1e33c8a-695c-42cb-9e55-3a5346413faf-kube-api-access-8m8s6\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.015071 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.015185 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.015889 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.016552 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.016694 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.019950 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-config\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.071031 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-8m8s6\" (UniqueName: \"kubernetes.io/projected/e1e33c8a-695c-42cb-9e55-3a5346413faf-kube-api-access-8m8s6\") pod \"dnsmasq-dns-b8fbc5445-562p5\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.153381 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.191130 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.320591 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-dns-svc\") pod \"091fce24-9317-4efb-8ee3-ebb38f200312\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.320789 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-config\") pod \"091fce24-9317-4efb-8ee3-ebb38f200312\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.320897 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-ovsdbserver-nb\") pod \"091fce24-9317-4efb-8ee3-ebb38f200312\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.320937 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n6pj2\" (UniqueName: \"kubernetes.io/projected/091fce24-9317-4efb-8ee3-ebb38f200312-kube-api-access-n6pj2\") pod \"091fce24-9317-4efb-8ee3-ebb38f200312\" (UID: \"091fce24-9317-4efb-8ee3-ebb38f200312\") " Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.321055 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "091fce24-9317-4efb-8ee3-ebb38f200312" (UID: "091fce24-9317-4efb-8ee3-ebb38f200312"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.321317 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-config" (OuterVolumeSpecName: "config") pod "091fce24-9317-4efb-8ee3-ebb38f200312" (UID: "091fce24-9317-4efb-8ee3-ebb38f200312"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.321480 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "091fce24-9317-4efb-8ee3-ebb38f200312" (UID: "091fce24-9317-4efb-8ee3-ebb38f200312"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.322155 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.322459 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.322478 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/091fce24-9317-4efb-8ee3-ebb38f200312-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.325370 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/091fce24-9317-4efb-8ee3-ebb38f200312-kube-api-access-n6pj2" (OuterVolumeSpecName: "kube-api-access-n6pj2") pod "091fce24-9317-4efb-8ee3-ebb38f200312" (UID: "091fce24-9317-4efb-8ee3-ebb38f200312"). InnerVolumeSpecName "kube-api-access-n6pj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.332679 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.424557 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n6pj2\" (UniqueName: \"kubernetes.io/projected/091fce24-9317-4efb-8ee3-ebb38f200312-kube-api-access-n6pj2\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:02 crc kubenswrapper[4753]: I1205 17:33:02.976320 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.168330 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-dns-svc\") pod \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.168406 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8lr4\" (UniqueName: \"kubernetes.io/projected/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-kube-api-access-g8lr4\") pod \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.168502 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-config\") pod \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\" (UID: \"22d4d35e-da87-4f57-a984-0bd8a4b10a3f\") " Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.175694 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-kube-api-access-g8lr4" (OuterVolumeSpecName: "kube-api-access-g8lr4") pod "22d4d35e-da87-4f57-a984-0bd8a4b10a3f" (UID: "22d4d35e-da87-4f57-a984-0bd8a4b10a3f"). InnerVolumeSpecName "kube-api-access-g8lr4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.186231 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-distributor-664b687b54-jj67d" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.214198 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d65f699f-l2m96" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.214691 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.215199 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-vznhm" event={"ID":"22d4d35e-da87-4f57-a984-0bd8a4b10a3f","Type":"ContainerDied","Data":"34244e18449a7b1dfec03911f275735a658e5e78ae586ab4f95c0c8f46d7e8b3"} Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.215245 4753 scope.go:117] "RemoveContainer" containerID="7c44e1eab9372fb992da2ce929672610038b4adfc4c7a3b5a9ad6c81dd3f3c16" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.237184 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "22d4d35e-da87-4f57-a984-0bd8a4b10a3f" (UID: "22d4d35e-da87-4f57-a984-0bd8a4b10a3f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.271251 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.273269 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-config" (OuterVolumeSpecName: "config") pod "22d4d35e-da87-4f57-a984-0bd8a4b10a3f" (UID: "22d4d35e-da87-4f57-a984-0bd8a4b10a3f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.273504 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8lr4\" (UniqueName: \"kubernetes.io/projected/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-kube-api-access-g8lr4\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.319393 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-l2m96"] Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.346985 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d65f699f-l2m96"] Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.374832 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22d4d35e-da87-4f57-a984-0bd8a4b10a3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.497433 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.559755 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-562p5"] Dec 05 17:33:03 crc kubenswrapper[4753]: W1205 17:33:03.559800 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1e33c8a_695c_42cb_9e55_3a5346413faf.slice/crio-290df3fb7de1a514cae5164a20d8ef202184b5688a6ea3bf49aa7ad9dbf892bb WatchSource:0}: Error finding container 290df3fb7de1a514cae5164a20d8ef202184b5688a6ea3bf49aa7ad9dbf892bb: Status 404 returned error can't find the container with id 290df3fb7de1a514cae5164a20d8ef202184b5688a6ea3bf49aa7ad9dbf892bb Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.602550 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-vznhm"] Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.610891 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-vznhm"] Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.637899 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.663592 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-sp59x"] Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.734688 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="091fce24-9317-4efb-8ee3-ebb38f200312" path="/var/lib/kubelet/pods/091fce24-9317-4efb-8ee3-ebb38f200312/volumes" Dec 05 17:33:03 crc kubenswrapper[4753]: I1205 17:33:03.735319 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22d4d35e-da87-4f57-a984-0bd8a4b10a3f" path="/var/lib/kubelet/pods/22d4d35e-da87-4f57-a984-0bd8a4b10a3f/volumes" Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.223713 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z4g8x" event={"ID":"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2","Type":"ContainerStarted","Data":"c5c030a133d88a455e107484e7e00dbe813dabe15f5d5eb2ca2e8ffcf1704fa1"} Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.226511 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-sp59x" 
event={"ID":"85fb40be-ac2e-404f-912b-2831ae6eb795","Type":"ContainerStarted","Data":"b8b926aa459732ece13374fb2976a18311407e73017298e6bc3c4e5f33b4a817"} Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.226553 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-sp59x" event={"ID":"85fb40be-ac2e-404f-912b-2831ae6eb795","Type":"ContainerStarted","Data":"8786e918671f4268fa66d73892ca0c63b37ef72cdf1bc781923b59eec4877668"} Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.229063 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"9df69769-e394-444f-b6e2-e788e989fe92","Type":"ContainerStarted","Data":"79bb4a34df64cef502518c16a78629f4bf8b7540833d041e4f97af770c30e9a3"} Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.231554 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c44e1a93-e233-46a2-b18a-e6c8c396a394","Type":"ContainerStarted","Data":"266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db"} Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.233178 4753 generic.go:334] "Generic (PLEG): container finished" podID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerID="e3bfbefa10648c8108c78f4e3459c915a9f09cb2d3e9165e64dc7351820ef803" exitCode=0 Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.233269 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" event={"ID":"e1e33c8a-695c-42cb-9e55-3a5346413faf","Type":"ContainerDied","Data":"e3bfbefa10648c8108c78f4e3459c915a9f09cb2d3e9165e64dc7351820ef803"} Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.233458 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" event={"ID":"e1e33c8a-695c-42cb-9e55-3a5346413faf","Type":"ContainerStarted","Data":"290df3fb7de1a514cae5164a20d8ef202184b5688a6ea3bf49aa7ad9dbf892bb"} Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.234711 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a2c3f794-ac1f-4115-bf82-a43f3a487332","Type":"ContainerStarted","Data":"21ac2bf77b54f3350a41afc2b8d5f977bc054061400774ab2a06e2637f3b2e72"} Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.238915 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"4c8d8a7a-38bd-49d9-8f25-5495c32462bc","Type":"ContainerStarted","Data":"10f5a8c382e48b53284df6599cd4e885188f27532eb223af9f479f30dc7bde86"} Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.255963 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-z4g8x" podStartSLOduration=3.2433976270000002 podStartE2EDuration="9.255944496s" podCreationTimestamp="2025-12-05 17:32:55 +0000 UTC" firstStartedPulling="2025-12-05 17:32:57.052715347 +0000 UTC m=+1715.555822353" lastFinishedPulling="2025-12-05 17:33:03.065262216 +0000 UTC m=+1721.568369222" observedRunningTime="2025-12-05 17:33:04.247405634 +0000 UTC m=+1722.750512650" watchObservedRunningTime="2025-12-05 17:33:04.255944496 +0000 UTC m=+1722.759051502" Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.278922 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=35.846041563 podStartE2EDuration="1m4.278904646s" podCreationTimestamp="2025-12-05 17:32:00 +0000 UTC" firstStartedPulling="2025-12-05 
17:32:34.590537297 +0000 UTC m=+1693.093644323" lastFinishedPulling="2025-12-05 17:33:03.0234004 +0000 UTC m=+1721.526507406" observedRunningTime="2025-12-05 17:33:04.276591521 +0000 UTC m=+1722.779698527" watchObservedRunningTime="2025-12-05 17:33:04.278904646 +0000 UTC m=+1722.782011652" Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.336097 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=38.122744517 podStartE2EDuration="1m8.336081666s" podCreationTimestamp="2025-12-05 17:31:56 +0000 UTC" firstStartedPulling="2025-12-05 17:32:22.473203206 +0000 UTC m=+1680.976310212" lastFinishedPulling="2025-12-05 17:32:52.686540345 +0000 UTC m=+1711.189647361" observedRunningTime="2025-12-05 17:33:04.303659507 +0000 UTC m=+1722.806766523" watchObservedRunningTime="2025-12-05 17:33:04.336081666 +0000 UTC m=+1722.839188672" Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.343666 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371969.511122 podStartE2EDuration="1m7.34365402s" podCreationTimestamp="2025-12-05 17:31:57 +0000 UTC" firstStartedPulling="2025-12-05 17:32:23.084918815 +0000 UTC m=+1681.588025811" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:04.330506348 +0000 UTC m=+1722.833613354" watchObservedRunningTime="2025-12-05 17:33:04.34365402 +0000 UTC m=+1722.846761026" Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.356523 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-sp59x" podStartSLOduration=3.356506624 podStartE2EDuration="3.356506624s" podCreationTimestamp="2025-12-05 17:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:04.351029429 +0000 UTC m=+1722.854136425" watchObservedRunningTime="2025-12-05 17:33:04.356506624 +0000 UTC m=+1722.859613630" Dec 05 17:33:04 crc kubenswrapper[4753]: I1205 17:33:04.766357 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-index-gateway-0" Dec 05 17:33:05 crc kubenswrapper[4753]: I1205 17:33:05.249010 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" event={"ID":"e1e33c8a-695c-42cb-9e55-3a5346413faf","Type":"ContainerStarted","Data":"bdeccf61f1b07b7d8bf5078dca6eb5383f15e0b0a2409c978233e3988824a823"} Dec 05 17:33:05 crc kubenswrapper[4753]: I1205 17:33:05.272800 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" podStartSLOduration=4.272780484 podStartE2EDuration="4.272780484s" podCreationTimestamp="2025-12-05 17:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:05.265773835 +0000 UTC m=+1723.768880841" watchObservedRunningTime="2025-12-05 17:33:05.272780484 +0000 UTC m=+1723.775887480" Dec 05 17:33:06 crc kubenswrapper[4753]: I1205 17:33:06.092900 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.111:5671: connect: connection refused" Dec 05 17:33:06 crc kubenswrapper[4753]: I1205 17:33:06.261613 4753 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a2c3f794-ac1f-4115-bf82-a43f3a487332","Type":"ContainerStarted","Data":"5b59e47f6e86c15cc51fe4d422b5b331f244f0d9fdc6596c3edb64e964b1090f"} Dec 05 17:33:06 crc kubenswrapper[4753]: I1205 17:33:06.261654 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"a2c3f794-ac1f-4115-bf82-a43f3a487332","Type":"ContainerStarted","Data":"64df6616db5a0dae213f13bcbf325700ea7a289dd59efcc99286b597f8f4e05a"} Dec 05 17:33:06 crc kubenswrapper[4753]: I1205 17:33:06.261668 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:06 crc kubenswrapper[4753]: I1205 17:33:06.261710 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 05 17:33:06 crc kubenswrapper[4753]: I1205 17:33:06.291716 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.572379738 podStartE2EDuration="5.29169625s" podCreationTimestamp="2025-12-05 17:33:01 +0000 UTC" firstStartedPulling="2025-12-05 17:33:03.674801258 +0000 UTC m=+1722.177908264" lastFinishedPulling="2025-12-05 17:33:05.39411777 +0000 UTC m=+1723.897224776" observedRunningTime="2025-12-05 17:33:06.285527625 +0000 UTC m=+1724.788634641" watchObservedRunningTime="2025-12-05 17:33:06.29169625 +0000 UTC m=+1724.794803256" Dec 05 17:33:06 crc kubenswrapper[4753]: I1205 17:33:06.536898 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.112:5671: connect: connection refused" Dec 05 17:33:07 crc kubenswrapper[4753]: I1205 17:33:07.246674 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 17:33:07 crc kubenswrapper[4753]: I1205 17:33:07.720632 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:33:07 crc kubenswrapper[4753]: E1205 17:33:07.720860 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:33:07 crc kubenswrapper[4753]: I1205 17:33:07.960719 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 05 17:33:07 crc kubenswrapper[4753]: I1205 17:33:07.960803 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 05 17:33:08 crc kubenswrapper[4753]: I1205 17:33:08.020012 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0" Dec 05 17:33:08 crc kubenswrapper[4753]: E1205 17:33:08.020276 4753 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:33:08 crc kubenswrapper[4753]: E1205 
17:33:08.020304 4753 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:33:08 crc kubenswrapper[4753]: E1205 17:33:08.020376 4753 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift podName:f4707e97-4f70-42d5-959e-1d2c8a9629e5 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:24.020350356 +0000 UTC m=+1742.523457402 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift") pod "swift-storage-0" (UID: "f4707e97-4f70-42d5-959e-1d2c8a9629e5") : configmap "swift-ring-files" not found Dec 05 17:33:09 crc kubenswrapper[4753]: I1205 17:33:09.428347 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 17:33:09 crc kubenswrapper[4753]: I1205 17:33:09.428442 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 17:33:09 crc kubenswrapper[4753]: I1205 17:33:09.535739 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 05 17:33:10 crc kubenswrapper[4753]: I1205 17:33:10.335712 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 05 17:33:10 crc kubenswrapper[4753]: I1205 17:33:10.415989 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 05 17:33:10 crc kubenswrapper[4753]: I1205 17:33:10.478029 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 05 17:33:12 crc kubenswrapper[4753]: I1205 17:33:12.312714 4753 generic.go:334] "Generic (PLEG): container finished" podID="3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" containerID="c5c030a133d88a455e107484e7e00dbe813dabe15f5d5eb2ca2e8ffcf1704fa1" exitCode=0 Dec 05 17:33:12 crc kubenswrapper[4753]: I1205 17:33:12.312770 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z4g8x" event={"ID":"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2","Type":"ContainerDied","Data":"c5c030a133d88a455e107484e7e00dbe813dabe15f5d5eb2ca2e8ffcf1704fa1"} Dec 05 17:33:12 crc kubenswrapper[4753]: I1205 17:33:12.334580 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:33:12 crc kubenswrapper[4753]: I1205 17:33:12.406194 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-9krvs"] Dec 05 17:33:12 crc kubenswrapper[4753]: I1205 17:33:12.406779 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" podUID="a91f1699-f2d1-4335-8f7f-cb44dae997da" containerName="dnsmasq-dns" containerID="cri-o://2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924" gracePeriod=10 Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:12.979926 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.022313 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-dns-svc\") pod \"a91f1699-f2d1-4335-8f7f-cb44dae997da\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.022510 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2lvv\" (UniqueName: \"kubernetes.io/projected/a91f1699-f2d1-4335-8f7f-cb44dae997da-kube-api-access-w2lvv\") pod \"a91f1699-f2d1-4335-8f7f-cb44dae997da\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.022580 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-config\") pod \"a91f1699-f2d1-4335-8f7f-cb44dae997da\" (UID: \"a91f1699-f2d1-4335-8f7f-cb44dae997da\") " Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.044546 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a91f1699-f2d1-4335-8f7f-cb44dae997da-kube-api-access-w2lvv" (OuterVolumeSpecName: "kube-api-access-w2lvv") pod "a91f1699-f2d1-4335-8f7f-cb44dae997da" (UID: "a91f1699-f2d1-4335-8f7f-cb44dae997da"). InnerVolumeSpecName "kube-api-access-w2lvv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.125368 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2lvv\" (UniqueName: \"kubernetes.io/projected/a91f1699-f2d1-4335-8f7f-cb44dae997da-kube-api-access-w2lvv\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.140418 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a91f1699-f2d1-4335-8f7f-cb44dae997da" (UID: "a91f1699-f2d1-4335-8f7f-cb44dae997da"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.165634 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-config" (OuterVolumeSpecName: "config") pod "a91f1699-f2d1-4335-8f7f-cb44dae997da" (UID: "a91f1699-f2d1-4335-8f7f-cb44dae997da"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.227018 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.227051 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a91f1699-f2d1-4335-8f7f-cb44dae997da-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.323128 4753 generic.go:334] "Generic (PLEG): container finished" podID="a91f1699-f2d1-4335-8f7f-cb44dae997da" containerID="2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924" exitCode=0 Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.323191 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" event={"ID":"a91f1699-f2d1-4335-8f7f-cb44dae997da","Type":"ContainerDied","Data":"2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924"} Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.323246 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" event={"ID":"a91f1699-f2d1-4335-8f7f-cb44dae997da","Type":"ContainerDied","Data":"bfc43738d27a13f208969ceb53e28f9c5a590d39ae9e1167cf1968d6457c5d5d"} Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.323204 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7cb5889db5-9krvs" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.323276 4753 scope.go:117] "RemoveContainer" containerID="2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.344738 4753 scope.go:117] "RemoveContainer" containerID="9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.369289 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-9krvs"] Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.381348 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7cb5889db5-9krvs"] Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.383635 4753 scope.go:117] "RemoveContainer" containerID="2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924" Dec 05 17:33:13 crc kubenswrapper[4753]: E1205 17:33:13.388937 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924\": container with ID starting with 2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924 not found: ID does not exist" containerID="2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924" Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.388985 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924"} err="failed to get container status \"2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924\": rpc error: code = NotFound desc = could not find container \"2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924\": container with ID starting with 2da3a2f0122e24b1f2eb18963687f9177a7049ec3a69e3f139f90e86d370f924 not found: ID does not exist" 
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.389014 4753 scope.go:117] "RemoveContainer" containerID="9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7"
Dec 05 17:33:13 crc kubenswrapper[4753]: E1205 17:33:13.389431 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7\": container with ID starting with 9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7 not found: ID does not exist" containerID="9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7"
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.389636 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7"} err="failed to get container status \"9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7\": rpc error: code = NotFound desc = could not find container \"9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7\": container with ID starting with 9c76fbf68c8fbe2c8007301247c7dbcde5dca3dfe45a42354d2c16e237cdd9a7 not found: ID does not exist"
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.469782 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-querier-5467947bf7-7l6pk"
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.738403 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a91f1699-f2d1-4335-8f7f-cb44dae997da" path="/var/lib/kubelet/pods/a91f1699-f2d1-4335-8f7f-cb44dae997da/volumes"
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.754472 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-z4g8x"
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.861861 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-dispersionconf\") pod \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") "
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.861913 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-combined-ca-bundle\") pod \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") "
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.862054 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-ring-data-devices\") pod \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") "
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.862086 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-swiftconf\") pod \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") "
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.862118 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbj5v\" (UniqueName: \"kubernetes.io/projected/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-kube-api-access-cbj5v\") pod \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") "
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.862193 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-scripts\") pod \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") "
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.862515 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-etc-swift\") pod \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\" (UID: \"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2\") "
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.864871 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" (UID: "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.865269 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" (UID: "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.868536 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-kube-api-access-cbj5v" (OuterVolumeSpecName: "kube-api-access-cbj5v") pod "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" (UID: "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2"). InnerVolumeSpecName "kube-api-access-cbj5v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.872015 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" (UID: "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.896796 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" (UID: "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.896543 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-scripts" (OuterVolumeSpecName: "scripts") pod "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" (UID: "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.900775 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" (UID: "3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.965230 4753 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.965270 4753 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.965279 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbj5v\" (UniqueName: \"kubernetes.io/projected/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-kube-api-access-cbj5v\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.965291 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.965301 4753 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.965310 4753 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:13 crc kubenswrapper[4753]: I1205 17:33:13.965320 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:14 crc kubenswrapper[4753]: I1205 17:33:14.337043 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-z4g8x" event={"ID":"3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2","Type":"ContainerDied","Data":"78f01af2815ef193d82323e8d496f76789ec4044f4fadc5091494043aea57f67"}
Dec 05 17:33:14 crc kubenswrapper[4753]: I1205 17:33:14.337098 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78f01af2815ef193d82323e8d496f76789ec4044f4fadc5091494043aea57f67"
Dec 05 17:33:14 crc kubenswrapper[4753]: I1205 17:33:14.337132 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-z4g8x"
Dec 05 17:33:14 crc kubenswrapper[4753]: I1205 17:33:14.346687 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="227cc7e4-602f-4c1e-afa7-0e106d3f505f" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 05 17:33:14 crc kubenswrapper[4753]: I1205 17:33:14.450569 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-compactor-0"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.092332 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.442333 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-d829-account-create-update-87s9q"]
Dec 05 17:33:16 crc kubenswrapper[4753]: E1205 17:33:16.442710 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" containerName="swift-ring-rebalance"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.442742 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" containerName="swift-ring-rebalance"
Dec 05 17:33:16 crc kubenswrapper[4753]: E1205 17:33:16.442764 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="22d4d35e-da87-4f57-a984-0bd8a4b10a3f" containerName="init"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.442771 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="22d4d35e-da87-4f57-a984-0bd8a4b10a3f" containerName="init"
Dec 05 17:33:16 crc kubenswrapper[4753]: E1205 17:33:16.442787 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a91f1699-f2d1-4335-8f7f-cb44dae997da" containerName="init"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.442794 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="a91f1699-f2d1-4335-8f7f-cb44dae997da" containerName="init"
Dec 05 17:33:16 crc kubenswrapper[4753]: E1205 17:33:16.442808 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a91f1699-f2d1-4335-8f7f-cb44dae997da" containerName="dnsmasq-dns"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.442814 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="a91f1699-f2d1-4335-8f7f-cb44dae997da" containerName="dnsmasq-dns"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.442977 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="a91f1699-f2d1-4335-8f7f-cb44dae997da" containerName="dnsmasq-dns"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.442993 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2" containerName="swift-ring-rebalance"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.443008 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="22d4d35e-da87-4f57-a984-0bd8a4b10a3f" containerName="init"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.443670 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-d829-account-create-update-87s9q"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.447019 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.454469 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-d829-account-create-update-87s9q"]
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.513759 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4v4rm\" (UniqueName: \"kubernetes.io/projected/7ceaaa26-c606-4aba-95c4-837d79fac0b8-kube-api-access-4v4rm\") pod \"cinder-d829-account-create-update-87s9q\" (UID: \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\") " pod="openstack/cinder-d829-account-create-update-87s9q"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.513888 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ceaaa26-c606-4aba-95c4-837d79fac0b8-operator-scripts\") pod \"cinder-d829-account-create-update-87s9q\" (UID: \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\") " pod="openstack/cinder-d829-account-create-update-87s9q"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.535286 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.541425 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-96mhr"]
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.542852 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.558342 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-96mhr"]
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.615600 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ch2t\" (UniqueName: \"kubernetes.io/projected/eb4d18c1-5c7a-49e1-8662-29758295058b-kube-api-access-9ch2t\") pod \"cinder-db-create-96mhr\" (UID: \"eb4d18c1-5c7a-49e1-8662-29758295058b\") " pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.615671 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4v4rm\" (UniqueName: \"kubernetes.io/projected/7ceaaa26-c606-4aba-95c4-837d79fac0b8-kube-api-access-4v4rm\") pod \"cinder-d829-account-create-update-87s9q\" (UID: \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\") " pod="openstack/cinder-d829-account-create-update-87s9q"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.615760 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb4d18c1-5c7a-49e1-8662-29758295058b-operator-scripts\") pod \"cinder-db-create-96mhr\" (UID: \"eb4d18c1-5c7a-49e1-8662-29758295058b\") " pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.615865 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ceaaa26-c606-4aba-95c4-837d79fac0b8-operator-scripts\") pod \"cinder-d829-account-create-update-87s9q\" (UID: \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\") " pod="openstack/cinder-d829-account-create-update-87s9q"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.616982 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ceaaa26-c606-4aba-95c4-837d79fac0b8-operator-scripts\") pod \"cinder-d829-account-create-update-87s9q\" (UID: \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\") " pod="openstack/cinder-d829-account-create-update-87s9q"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.649284 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-58dh4"]
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.650564 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-58dh4"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.657037 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4v4rm\" (UniqueName: \"kubernetes.io/projected/7ceaaa26-c606-4aba-95c4-837d79fac0b8-kube-api-access-4v4rm\") pod \"cinder-d829-account-create-update-87s9q\" (UID: \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\") " pod="openstack/cinder-d829-account-create-update-87s9q"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.660908 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-1e4c-account-create-update-5s55z"]
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.662242 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-1e4c-account-create-update-5s55z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.670223 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-58dh4"]
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.670437 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-db-secret"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.676851 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-1e4c-account-create-update-5s55z"]
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.717137 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ch2t\" (UniqueName: \"kubernetes.io/projected/eb4d18c1-5c7a-49e1-8662-29758295058b-kube-api-access-9ch2t\") pod \"cinder-db-create-96mhr\" (UID: \"eb4d18c1-5c7a-49e1-8662-29758295058b\") " pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.717536 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842c7431-4ec7-4dae-98ea-c1d6482295f9-operator-scripts\") pod \"barbican-db-create-58dh4\" (UID: \"842c7431-4ec7-4dae-98ea-c1d6482295f9\") " pod="openstack/barbican-db-create-58dh4"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.717560 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbpvt\" (UniqueName: \"kubernetes.io/projected/5af8977e-c995-4b12-a427-c4223f563be6-kube-api-access-hbpvt\") pod \"cloudkitty-1e4c-account-create-update-5s55z\" (UID: \"5af8977e-c995-4b12-a427-c4223f563be6\") " pod="openstack/cloudkitty-1e4c-account-create-update-5s55z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.717594 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb4d18c1-5c7a-49e1-8662-29758295058b-operator-scripts\") pod \"cinder-db-create-96mhr\" (UID: \"eb4d18c1-5c7a-49e1-8662-29758295058b\") " pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.717624 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdkts\" (UniqueName: \"kubernetes.io/projected/842c7431-4ec7-4dae-98ea-c1d6482295f9-kube-api-access-gdkts\") pod \"barbican-db-create-58dh4\" (UID: \"842c7431-4ec7-4dae-98ea-c1d6482295f9\") " pod="openstack/barbican-db-create-58dh4"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.717675 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5af8977e-c995-4b12-a427-c4223f563be6-operator-scripts\") pod \"cloudkitty-1e4c-account-create-update-5s55z\" (UID: \"5af8977e-c995-4b12-a427-c4223f563be6\") " pod="openstack/cloudkitty-1e4c-account-create-update-5s55z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.718540 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb4d18c1-5c7a-49e1-8662-29758295058b-operator-scripts\") pod \"cinder-db-create-96mhr\" (UID: \"eb4d18c1-5c7a-49e1-8662-29758295058b\") " pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.756002 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ch2t\" (UniqueName: \"kubernetes.io/projected/eb4d18c1-5c7a-49e1-8662-29758295058b-kube-api-access-9ch2t\") pod \"cinder-db-create-96mhr\" (UID: \"eb4d18c1-5c7a-49e1-8662-29758295058b\") " pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.763753 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-d829-account-create-update-87s9q"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.794071 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-affb-account-create-update-m555z"]
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.795264 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-affb-account-create-update-m555z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.799682 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.807597 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-affb-account-create-update-m555z"]
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.819954 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnm4g\" (UniqueName: \"kubernetes.io/projected/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-kube-api-access-qnm4g\") pod \"barbican-affb-account-create-update-m555z\" (UID: \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\") " pod="openstack/barbican-affb-account-create-update-m555z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.820052 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842c7431-4ec7-4dae-98ea-c1d6482295f9-operator-scripts\") pod \"barbican-db-create-58dh4\" (UID: \"842c7431-4ec7-4dae-98ea-c1d6482295f9\") " pod="openstack/barbican-db-create-58dh4"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.820081 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbpvt\" (UniqueName: \"kubernetes.io/projected/5af8977e-c995-4b12-a427-c4223f563be6-kube-api-access-hbpvt\") pod \"cloudkitty-1e4c-account-create-update-5s55z\" (UID: \"5af8977e-c995-4b12-a427-c4223f563be6\") " pod="openstack/cloudkitty-1e4c-account-create-update-5s55z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.820133 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdkts\" (UniqueName: \"kubernetes.io/projected/842c7431-4ec7-4dae-98ea-c1d6482295f9-kube-api-access-gdkts\") pod \"barbican-db-create-58dh4\" (UID: \"842c7431-4ec7-4dae-98ea-c1d6482295f9\") " pod="openstack/barbican-db-create-58dh4"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.820284 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-operator-scripts\") pod \"barbican-affb-account-create-update-m555z\" (UID: \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\") " pod="openstack/barbican-affb-account-create-update-m555z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.820322 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5af8977e-c995-4b12-a427-c4223f563be6-operator-scripts\") pod \"cloudkitty-1e4c-account-create-update-5s55z\" (UID: \"5af8977e-c995-4b12-a427-c4223f563be6\") " pod="openstack/cloudkitty-1e4c-account-create-update-5s55z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.821907 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842c7431-4ec7-4dae-98ea-c1d6482295f9-operator-scripts\") pod \"barbican-db-create-58dh4\" (UID: \"842c7431-4ec7-4dae-98ea-c1d6482295f9\") " pod="openstack/barbican-db-create-58dh4"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.824056 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5af8977e-c995-4b12-a427-c4223f563be6-operator-scripts\") pod \"cloudkitty-1e4c-account-create-update-5s55z\" (UID: \"5af8977e-c995-4b12-a427-c4223f563be6\") " pod="openstack/cloudkitty-1e4c-account-create-update-5s55z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.846313 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbpvt\" (UniqueName: \"kubernetes.io/projected/5af8977e-c995-4b12-a427-c4223f563be6-kube-api-access-hbpvt\") pod \"cloudkitty-1e4c-account-create-update-5s55z\" (UID: \"5af8977e-c995-4b12-a427-c4223f563be6\") " pod="openstack/cloudkitty-1e4c-account-create-update-5s55z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.846892 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdkts\" (UniqueName: \"kubernetes.io/projected/842c7431-4ec7-4dae-98ea-c1d6482295f9-kube-api-access-gdkts\") pod \"barbican-db-create-58dh4\" (UID: \"842c7431-4ec7-4dae-98ea-c1d6482295f9\") " pod="openstack/barbican-db-create-58dh4"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.862362 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.924580 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnm4g\" (UniqueName: \"kubernetes.io/projected/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-kube-api-access-qnm4g\") pod \"barbican-affb-account-create-update-m555z\" (UID: \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\") " pod="openstack/barbican-affb-account-create-update-m555z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.924716 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-operator-scripts\") pod \"barbican-affb-account-create-update-m555z\" (UID: \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\") " pod="openstack/barbican-affb-account-create-update-m555z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.925671 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-operator-scripts\") pod \"barbican-affb-account-create-update-m555z\" (UID: \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\") " pod="openstack/barbican-affb-account-create-update-m555z"
Dec 05 17:33:16 crc kubenswrapper[4753]: I1205 17:33:16.951504 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnm4g\" (UniqueName: \"kubernetes.io/projected/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-kube-api-access-qnm4g\") pod \"barbican-affb-account-create-update-m555z\" (UID: \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\") " pod="openstack/barbican-affb-account-create-update-m555z"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.036337 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-58dh4"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.057793 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-443d-account-create-update-fzttf"]
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.064612 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-1e4c-account-create-update-5s55z"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.069742 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.075171 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.114747 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-create-ks6km"]
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.119959 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-ks6km"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.144631 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2dad3f-2686-4fdc-8686-aeed53073cd2-operator-scripts\") pod \"cloudkitty-db-create-ks6km\" (UID: \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\") " pod="openstack/cloudkitty-db-create-ks6km"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.144725 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r228z\" (UniqueName: \"kubernetes.io/projected/dc2dad3f-2686-4fdc-8686-aeed53073cd2-kube-api-access-r228z\") pod \"cloudkitty-db-create-ks6km\" (UID: \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\") " pod="openstack/cloudkitty-db-create-ks6km"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.144911 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf2nt\" (UniqueName: \"kubernetes.io/projected/00d86d0d-6896-435e-808f-eec9c8225f99-kube-api-access-qf2nt\") pod \"neutron-443d-account-create-update-fzttf\" (UID: \"00d86d0d-6896-435e-808f-eec9c8225f99\") " pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.145018 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00d86d0d-6896-435e-808f-eec9c8225f99-operator-scripts\") pod \"neutron-443d-account-create-update-fzttf\" (UID: \"00d86d0d-6896-435e-808f-eec9c8225f99\") " pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.162642 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-443d-account-create-update-fzttf"]
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.174258 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-ks6km"]
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.182258 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.203409 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-k7h4r"]
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.204790 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-k7h4r"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.213907 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-k7h4r"]
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.229826 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-d829-account-create-update-87s9q"]
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.239656 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-affb-account-create-update-m555z"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.247433 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.254883 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2dad3f-2686-4fdc-8686-aeed53073cd2-operator-scripts\") pod \"cloudkitty-db-create-ks6km\" (UID: \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\") " pod="openstack/cloudkitty-db-create-ks6km"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.254924 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afe6484a-49e0-4297-b722-3ad1eb80f936-operator-scripts\") pod \"neutron-db-create-k7h4r\" (UID: \"afe6484a-49e0-4297-b722-3ad1eb80f936\") " pod="openstack/neutron-db-create-k7h4r"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.254960 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r228z\" (UniqueName: \"kubernetes.io/projected/dc2dad3f-2686-4fdc-8686-aeed53073cd2-kube-api-access-r228z\") pod \"cloudkitty-db-create-ks6km\" (UID: \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\") " pod="openstack/cloudkitty-db-create-ks6km"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.254989 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrv5h\" (UniqueName: \"kubernetes.io/projected/afe6484a-49e0-4297-b722-3ad1eb80f936-kube-api-access-hrv5h\") pod \"neutron-db-create-k7h4r\" (UID: \"afe6484a-49e0-4297-b722-3ad1eb80f936\") " pod="openstack/neutron-db-create-k7h4r"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.255033 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf2nt\" (UniqueName: \"kubernetes.io/projected/00d86d0d-6896-435e-808f-eec9c8225f99-kube-api-access-qf2nt\") pod \"neutron-443d-account-create-update-fzttf\" (UID: \"00d86d0d-6896-435e-808f-eec9c8225f99\") " pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.255081 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00d86d0d-6896-435e-808f-eec9c8225f99-operator-scripts\") pod \"neutron-443d-account-create-update-fzttf\" (UID: \"00d86d0d-6896-435e-808f-eec9c8225f99\") " pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.259060 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2dad3f-2686-4fdc-8686-aeed53073cd2-operator-scripts\") pod \"cloudkitty-db-create-ks6km\" (UID: \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\") " pod="openstack/cloudkitty-db-create-ks6km"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.264809 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.265923 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00d86d0d-6896-435e-808f-eec9c8225f99-operator-scripts\") pod \"neutron-443d-account-create-update-fzttf\" (UID: \"00d86d0d-6896-435e-808f-eec9c8225f99\") " pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.300746 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf2nt\" (UniqueName: \"kubernetes.io/projected/00d86d0d-6896-435e-808f-eec9c8225f99-kube-api-access-qf2nt\") pod \"neutron-443d-account-create-update-fzttf\" (UID: \"00d86d0d-6896-435e-808f-eec9c8225f99\") " pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.304933 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r228z\" (UniqueName: \"kubernetes.io/projected/dc2dad3f-2686-4fdc-8686-aeed53073cd2-kube-api-access-r228z\") pod \"cloudkitty-db-create-ks6km\" (UID: \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\") " pod="openstack/cloudkitty-db-create-ks6km"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.356533 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afe6484a-49e0-4297-b722-3ad1eb80f936-operator-scripts\") pod \"neutron-db-create-k7h4r\" (UID: \"afe6484a-49e0-4297-b722-3ad1eb80f936\") " pod="openstack/neutron-db-create-k7h4r"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.356603 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrv5h\" (UniqueName: \"kubernetes.io/projected/afe6484a-49e0-4297-b722-3ad1eb80f936-kube-api-access-hrv5h\") pod \"neutron-db-create-k7h4r\" (UID: \"afe6484a-49e0-4297-b722-3ad1eb80f936\") " pod="openstack/neutron-db-create-k7h4r"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.357796 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afe6484a-49e0-4297-b722-3ad1eb80f936-operator-scripts\") pod \"neutron-db-create-k7h4r\" (UID: \"afe6484a-49e0-4297-b722-3ad1eb80f936\") " pod="openstack/neutron-db-create-k7h4r"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.377755 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d829-account-create-update-87s9q" event={"ID":"7ceaaa26-c606-4aba-95c4-837d79fac0b8","Type":"ContainerStarted","Data":"632304badd88598afaa22d84b0c3fc0d5cb189912c181d033c2f2ee2f9e31bb3"}
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.383114 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.397433 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrv5h\" (UniqueName: \"kubernetes.io/projected/afe6484a-49e0-4297-b722-3ad1eb80f936-kube-api-access-hrv5h\") pod \"neutron-db-create-k7h4r\" (UID: \"afe6484a-49e0-4297-b722-3ad1eb80f936\") " pod="openstack/neutron-db-create-k7h4r"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.407602 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.476501 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-ks6km"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.520919 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-k7h4r"
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.562406 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-96mhr"]
Dec 05 17:33:17 crc kubenswrapper[4753]: W1205 17:33:17.580955 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb4d18c1_5c7a_49e1_8662_29758295058b.slice/crio-d5a6640ded8a752d98fe57426bbadee987ec34e766f4d3b246b3253a11fa8139 WatchSource:0}: Error finding container d5a6640ded8a752d98fe57426bbadee987ec34e766f4d3b246b3253a11fa8139: Status 404 returned error can't find the container with id d5a6640ded8a752d98fe57426bbadee987ec34e766f4d3b246b3253a11fa8139
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.881093 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-1e4c-account-create-update-5s55z"]
Dec 05 17:33:17 crc kubenswrapper[4753]: I1205 17:33:17.917482 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-58dh4"]
Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.156651 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-affb-account-create-update-m555z"]
Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.221209 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-443d-account-create-update-fzttf"]
Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.272549 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-k7h4r"]
Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.302236 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-create-ks6km"]
Dec 05 17:33:18 crc kubenswrapper[4753]: W1205 17:33:18.330031 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddc2dad3f_2686_4fdc_8686_aeed53073cd2.slice/crio-80bde15ff616600befd893df742612c121ca9508630561143e51374d5d069382 WatchSource:0}: Error finding container 80bde15ff616600befd893df742612c121ca9508630561143e51374d5d069382: Status 404 returned error can't find the container with id 80bde15ff616600befd893df742612c121ca9508630561143e51374d5d069382
Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.387897 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-443d-account-create-update-fzttf" event={"ID":"00d86d0d-6896-435e-808f-eec9c8225f99","Type":"ContainerStarted","Data":"2c37b039ede04a74c28a7b3b237b8ffc9042ad09c0aa4c14d4a4581c2a6c11af"}
Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.390296 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-96mhr" event={"ID":"eb4d18c1-5c7a-49e1-8662-29758295058b","Type":"ContainerStarted","Data":"d5a6640ded8a752d98fe57426bbadee987ec34e766f4d3b246b3253a11fa8139"}
Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.391761 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod"
pod="openstack/cloudkitty-db-create-ks6km" event={"ID":"dc2dad3f-2686-4fdc-8686-aeed53073cd2","Type":"ContainerStarted","Data":"80bde15ff616600befd893df742612c121ca9508630561143e51374d5d069382"} Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.395209 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-1e4c-account-create-update-5s55z" event={"ID":"5af8977e-c995-4b12-a427-c4223f563be6","Type":"ContainerStarted","Data":"c1e6a03c894742282798bb9ac61f1ba31147b869bcbe8706f8249b158e04bd85"} Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.396456 4753 generic.go:334] "Generic (PLEG): container finished" podID="7ceaaa26-c606-4aba-95c4-837d79fac0b8" containerID="b3b6ee70c554d233f3c718078ff4c045968a451d8d7697c67f0678dac1ef2bc9" exitCode=0 Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.396503 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d829-account-create-update-87s9q" event={"ID":"7ceaaa26-c606-4aba-95c4-837d79fac0b8","Type":"ContainerDied","Data":"b3b6ee70c554d233f3c718078ff4c045968a451d8d7697c67f0678dac1ef2bc9"} Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.400958 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-affb-account-create-update-m555z" event={"ID":"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1","Type":"ContainerStarted","Data":"adc1fcf906920a93bdb17848b9fa5a4e5c24dfc82e6845440cf44c106e8e65c3"} Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.405788 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-k7h4r" event={"ID":"afe6484a-49e0-4297-b722-3ad1eb80f936","Type":"ContainerStarted","Data":"69b8c495df79566f34ea8949db87206db2d6e47bdf75de922ccfa899ad77e20f"} Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.408777 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-58dh4" event={"ID":"842c7431-4ec7-4dae-98ea-c1d6482295f9","Type":"ContainerStarted","Data":"937f2a5e4080a88e914a57b9b0bdfe8ca12291ed513d3b3b14c876f10584e8b5"} Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.408858 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-58dh4" event={"ID":"842c7431-4ec7-4dae-98ea-c1d6482295f9","Type":"ContainerStarted","Data":"9fe69f6cab638ed75d15ba6ab3130ca5c1d9091e546b385708e966c9604cee71"} Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.435895 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-58dh4" podStartSLOduration=2.435879759 podStartE2EDuration="2.435879759s" podCreationTimestamp="2025-12-05 17:33:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:18.431483084 +0000 UTC m=+1736.934590090" watchObservedRunningTime="2025-12-05 17:33:18.435879759 +0000 UTC m=+1736.938986765" Dec 05 17:33:18 crc kubenswrapper[4753]: I1205 17:33:18.720480 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:33:18 crc kubenswrapper[4753]: E1205 17:33:18.720782 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.063682 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-m2cm2"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.067732 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-m2cm2" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.081277 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-m2cm2"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.190270 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-a894-account-create-update-rg285"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.191501 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a894-account-create-update-rg285" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.193457 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.199469 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4n6dx\" (UniqueName: \"kubernetes.io/projected/9e1035b6-a207-4ac0-bee6-2ce590e2101e-kube-api-access-4n6dx\") pod \"keystone-db-create-m2cm2\" (UID: \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\") " pod="openstack/keystone-db-create-m2cm2" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.199623 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1035b6-a207-4ac0-bee6-2ce590e2101e-operator-scripts\") pod \"keystone-db-create-m2cm2\" (UID: \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\") " pod="openstack/keystone-db-create-m2cm2" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.200897 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-a894-account-create-update-rg285"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.301750 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1035b6-a207-4ac0-bee6-2ce590e2101e-operator-scripts\") pod \"keystone-db-create-m2cm2\" (UID: \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\") " pod="openstack/keystone-db-create-m2cm2" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.301851 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw5qn\" (UniqueName: \"kubernetes.io/projected/821491b7-948f-4be8-b01f-90168f29ab11-kube-api-access-bw5qn\") pod \"keystone-a894-account-create-update-rg285\" (UID: \"821491b7-948f-4be8-b01f-90168f29ab11\") " pod="openstack/keystone-a894-account-create-update-rg285" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.301882 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4n6dx\" (UniqueName: \"kubernetes.io/projected/9e1035b6-a207-4ac0-bee6-2ce590e2101e-kube-api-access-4n6dx\") pod \"keystone-db-create-m2cm2\" (UID: \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\") " pod="openstack/keystone-db-create-m2cm2" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.301985 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" 
(UniqueName: \"kubernetes.io/configmap/821491b7-948f-4be8-b01f-90168f29ab11-operator-scripts\") pod \"keystone-a894-account-create-update-rg285\" (UID: \"821491b7-948f-4be8-b01f-90168f29ab11\") " pod="openstack/keystone-a894-account-create-update-rg285" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.302747 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1035b6-a207-4ac0-bee6-2ce590e2101e-operator-scripts\") pod \"keystone-db-create-m2cm2\" (UID: \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\") " pod="openstack/keystone-db-create-m2cm2" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.321607 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4n6dx\" (UniqueName: \"kubernetes.io/projected/9e1035b6-a207-4ac0-bee6-2ce590e2101e-kube-api-access-4n6dx\") pod \"keystone-db-create-m2cm2\" (UID: \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\") " pod="openstack/keystone-db-create-m2cm2" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.372081 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-xbccd"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.373741 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-xbccd" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.390612 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-m2cm2" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.401757 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-xbccd"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.403991 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw5qn\" (UniqueName: \"kubernetes.io/projected/821491b7-948f-4be8-b01f-90168f29ab11-kube-api-access-bw5qn\") pod \"keystone-a894-account-create-update-rg285\" (UID: \"821491b7-948f-4be8-b01f-90168f29ab11\") " pod="openstack/keystone-a894-account-create-update-rg285" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.404076 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/821491b7-948f-4be8-b01f-90168f29ab11-operator-scripts\") pod \"keystone-a894-account-create-update-rg285\" (UID: \"821491b7-948f-4be8-b01f-90168f29ab11\") " pod="openstack/keystone-a894-account-create-update-rg285" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.405666 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/821491b7-948f-4be8-b01f-90168f29ab11-operator-scripts\") pod \"keystone-a894-account-create-update-rg285\" (UID: \"821491b7-948f-4be8-b01f-90168f29ab11\") " pod="openstack/keystone-a894-account-create-update-rg285" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.433204 4753 generic.go:334] "Generic (PLEG): container finished" podID="5af8977e-c995-4b12-a427-c4223f563be6" containerID="edeeac0d7fe94a0ce91bd935d3e74594522aba255e6adea0a00aedec50662ba6" exitCode=0 Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.433294 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-1e4c-account-create-update-5s55z" event={"ID":"5af8977e-c995-4b12-a427-c4223f563be6","Type":"ContainerDied","Data":"edeeac0d7fe94a0ce91bd935d3e74594522aba255e6adea0a00aedec50662ba6"} Dec 05 
17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.433724 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw5qn\" (UniqueName: \"kubernetes.io/projected/821491b7-948f-4be8-b01f-90168f29ab11-kube-api-access-bw5qn\") pod \"keystone-a894-account-create-update-rg285\" (UID: \"821491b7-948f-4be8-b01f-90168f29ab11\") " pod="openstack/keystone-a894-account-create-update-rg285" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.437370 4753 generic.go:334] "Generic (PLEG): container finished" podID="1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1" containerID="23e332dd14613502cff19f7e08e6aaad1d1ba9fa8cde51a702b7014210ca4f25" exitCode=0 Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.437460 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-affb-account-create-update-m555z" event={"ID":"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1","Type":"ContainerDied","Data":"23e332dd14613502cff19f7e08e6aaad1d1ba9fa8cde51a702b7014210ca4f25"} Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.454559 4753 generic.go:334] "Generic (PLEG): container finished" podID="afe6484a-49e0-4297-b722-3ad1eb80f936" containerID="3268fbee62474ac7876e00c571e9d7af81d8b948899ea0cbc2e32e6da28ac980" exitCode=0 Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.454669 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-k7h4r" event={"ID":"afe6484a-49e0-4297-b722-3ad1eb80f936","Type":"ContainerDied","Data":"3268fbee62474ac7876e00c571e9d7af81d8b948899ea0cbc2e32e6da28ac980"} Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.464308 4753 generic.go:334] "Generic (PLEG): container finished" podID="842c7431-4ec7-4dae-98ea-c1d6482295f9" containerID="937f2a5e4080a88e914a57b9b0bdfe8ca12291ed513d3b3b14c876f10584e8b5" exitCode=0 Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.464416 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-58dh4" event={"ID":"842c7431-4ec7-4dae-98ea-c1d6482295f9","Type":"ContainerDied","Data":"937f2a5e4080a88e914a57b9b0bdfe8ca12291ed513d3b3b14c876f10584e8b5"} Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.475486 4753 generic.go:334] "Generic (PLEG): container finished" podID="00d86d0d-6896-435e-808f-eec9c8225f99" containerID="627652aba9b6ecfe16b97eeb90039e680953cd504bd990ff269a9b3854a69148" exitCode=0 Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.475548 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-443d-account-create-update-fzttf" event={"ID":"00d86d0d-6896-435e-808f-eec9c8225f99","Type":"ContainerDied","Data":"627652aba9b6ecfe16b97eeb90039e680953cd504bd990ff269a9b3854a69148"} Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.476587 4753 generic.go:334] "Generic (PLEG): container finished" podID="eb4d18c1-5c7a-49e1-8662-29758295058b" containerID="35c4e45d1ccf2ebe69ce1e80cffe58987ee87cdee0530a1b9fabd3d9a618bca6" exitCode=0 Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.476630 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-96mhr" event={"ID":"eb4d18c1-5c7a-49e1-8662-29758295058b","Type":"ContainerDied","Data":"35c4e45d1ccf2ebe69ce1e80cffe58987ee87cdee0530a1b9fabd3d9a618bca6"} Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.482044 4753 generic.go:334] "Generic (PLEG): container finished" podID="dc2dad3f-2686-4fdc-8686-aeed53073cd2" containerID="412bb4c7d740b7f3e594de262a6dab3b328b0f72159f15e79fe2bdf65c6f87e3" exitCode=0 Dec 05 17:33:19 crc 
kubenswrapper[4753]: I1205 17:33:19.482252 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-ks6km" event={"ID":"dc2dad3f-2686-4fdc-8686-aeed53073cd2","Type":"ContainerDied","Data":"412bb4c7d740b7f3e594de262a6dab3b328b0f72159f15e79fe2bdf65c6f87e3"} Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.500803 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-a206-account-create-update-999f6"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.504520 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a206-account-create-update-999f6" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.505286 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4jw7\" (UniqueName: \"kubernetes.io/projected/0ecd357b-ae3c-42f0-a90b-f09e00af942a-kube-api-access-m4jw7\") pod \"placement-db-create-xbccd\" (UID: \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\") " pod="openstack/placement-db-create-xbccd" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.505435 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0ecd357b-ae3c-42f0-a90b-f09e00af942a-operator-scripts\") pod \"placement-db-create-xbccd\" (UID: \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\") " pod="openstack/placement-db-create-xbccd" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.507602 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a894-account-create-update-rg285" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.513276 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.566853 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a206-account-create-update-999f6"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.611908 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4jw7\" (UniqueName: \"kubernetes.io/projected/0ecd357b-ae3c-42f0-a90b-f09e00af942a-kube-api-access-m4jw7\") pod \"placement-db-create-xbccd\" (UID: \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\") " pod="openstack/placement-db-create-xbccd" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.612057 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26f56e94-9cee-4adb-a29e-74661b598739-operator-scripts\") pod \"placement-a206-account-create-update-999f6\" (UID: \"26f56e94-9cee-4adb-a29e-74661b598739\") " pod="openstack/placement-a206-account-create-update-999f6" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.612111 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkwsz\" (UniqueName: \"kubernetes.io/projected/26f56e94-9cee-4adb-a29e-74661b598739-kube-api-access-jkwsz\") pod \"placement-a206-account-create-update-999f6\" (UID: \"26f56e94-9cee-4adb-a29e-74661b598739\") " pod="openstack/placement-a206-account-create-update-999f6" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.612139 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/0ecd357b-ae3c-42f0-a90b-f09e00af942a-operator-scripts\") pod \"placement-db-create-xbccd\" (UID: \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\") " pod="openstack/placement-db-create-xbccd" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.613138 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0ecd357b-ae3c-42f0-a90b-f09e00af942a-operator-scripts\") pod \"placement-db-create-xbccd\" (UID: \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\") " pod="openstack/placement-db-create-xbccd" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.652872 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4jw7\" (UniqueName: \"kubernetes.io/projected/0ecd357b-ae3c-42f0-a90b-f09e00af942a-kube-api-access-m4jw7\") pod \"placement-db-create-xbccd\" (UID: \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\") " pod="openstack/placement-db-create-xbccd" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.688443 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-pz8kp"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.690686 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-xbccd" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.691873 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pz8kp" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.716486 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkwsz\" (UniqueName: \"kubernetes.io/projected/26f56e94-9cee-4adb-a29e-74661b598739-kube-api-access-jkwsz\") pod \"placement-a206-account-create-update-999f6\" (UID: \"26f56e94-9cee-4adb-a29e-74661b598739\") " pod="openstack/placement-a206-account-create-update-999f6" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.716671 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26f56e94-9cee-4adb-a29e-74661b598739-operator-scripts\") pod \"placement-a206-account-create-update-999f6\" (UID: \"26f56e94-9cee-4adb-a29e-74661b598739\") " pod="openstack/placement-a206-account-create-update-999f6" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.717443 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26f56e94-9cee-4adb-a29e-74661b598739-operator-scripts\") pod \"placement-a206-account-create-update-999f6\" (UID: \"26f56e94-9cee-4adb-a29e-74661b598739\") " pod="openstack/placement-a206-account-create-update-999f6" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.721895 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-pz8kp"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.777043 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkwsz\" (UniqueName: \"kubernetes.io/projected/26f56e94-9cee-4adb-a29e-74661b598739-kube-api-access-jkwsz\") pod \"placement-a206-account-create-update-999f6\" (UID: \"26f56e94-9cee-4adb-a29e-74661b598739\") " pod="openstack/placement-a206-account-create-update-999f6" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.803295 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-313f-account-create-update-kbxsm"] Dec 05 17:33:19 crc kubenswrapper[4753]: 
I1205 17:33:19.804665 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.808394 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.819668 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-313f-account-create-update-kbxsm"] Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.821605 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bd95044-6b92-43ac-8bab-05b16590530c-operator-scripts\") pod \"glance-313f-account-create-update-kbxsm\" (UID: \"5bd95044-6b92-43ac-8bab-05b16590530c\") " pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.821639 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqf6r\" (UniqueName: \"kubernetes.io/projected/5bd95044-6b92-43ac-8bab-05b16590530c-kube-api-access-hqf6r\") pod \"glance-313f-account-create-update-kbxsm\" (UID: \"5bd95044-6b92-43ac-8bab-05b16590530c\") " pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.821885 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vcht\" (UniqueName: \"kubernetes.io/projected/01bbb12f-14b3-4fdb-972e-e33615efe1a3-kube-api-access-7vcht\") pod \"glance-db-create-pz8kp\" (UID: \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\") " pod="openstack/glance-db-create-pz8kp" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.821909 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01bbb12f-14b3-4fdb-972e-e33615efe1a3-operator-scripts\") pod \"glance-db-create-pz8kp\" (UID: \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\") " pod="openstack/glance-db-create-pz8kp" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.849204 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-a206-account-create-update-999f6" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.925722 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bd95044-6b92-43ac-8bab-05b16590530c-operator-scripts\") pod \"glance-313f-account-create-update-kbxsm\" (UID: \"5bd95044-6b92-43ac-8bab-05b16590530c\") " pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.925782 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqf6r\" (UniqueName: \"kubernetes.io/projected/5bd95044-6b92-43ac-8bab-05b16590530c-kube-api-access-hqf6r\") pod \"glance-313f-account-create-update-kbxsm\" (UID: \"5bd95044-6b92-43ac-8bab-05b16590530c\") " pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.925919 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vcht\" (UniqueName: \"kubernetes.io/projected/01bbb12f-14b3-4fdb-972e-e33615efe1a3-kube-api-access-7vcht\") pod \"glance-db-create-pz8kp\" (UID: \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\") " pod="openstack/glance-db-create-pz8kp" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.925945 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01bbb12f-14b3-4fdb-972e-e33615efe1a3-operator-scripts\") pod \"glance-db-create-pz8kp\" (UID: \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\") " pod="openstack/glance-db-create-pz8kp" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.926732 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01bbb12f-14b3-4fdb-972e-e33615efe1a3-operator-scripts\") pod \"glance-db-create-pz8kp\" (UID: \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\") " pod="openstack/glance-db-create-pz8kp" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.927206 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bd95044-6b92-43ac-8bab-05b16590530c-operator-scripts\") pod \"glance-313f-account-create-update-kbxsm\" (UID: \"5bd95044-6b92-43ac-8bab-05b16590530c\") " pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.966825 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqf6r\" (UniqueName: \"kubernetes.io/projected/5bd95044-6b92-43ac-8bab-05b16590530c-kube-api-access-hqf6r\") pod \"glance-313f-account-create-update-kbxsm\" (UID: \"5bd95044-6b92-43ac-8bab-05b16590530c\") " pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:19 crc kubenswrapper[4753]: I1205 17:33:19.972441 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vcht\" (UniqueName: \"kubernetes.io/projected/01bbb12f-14b3-4fdb-972e-e33615efe1a3-kube-api-access-7vcht\") pod \"glance-db-create-pz8kp\" (UID: \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\") " pod="openstack/glance-db-create-pz8kp" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.047610 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-pz8kp" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.170119 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.278965 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-m2cm2"] Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.282369 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-d829-account-create-update-87s9q" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.346077 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ceaaa26-c606-4aba-95c4-837d79fac0b8-operator-scripts\") pod \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\" (UID: \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\") " Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.346310 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4v4rm\" (UniqueName: \"kubernetes.io/projected/7ceaaa26-c606-4aba-95c4-837d79fac0b8-kube-api-access-4v4rm\") pod \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\" (UID: \"7ceaaa26-c606-4aba-95c4-837d79fac0b8\") " Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.350235 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7ceaaa26-c606-4aba-95c4-837d79fac0b8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7ceaaa26-c606-4aba-95c4-837d79fac0b8" (UID: "7ceaaa26-c606-4aba-95c4-837d79fac0b8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.367881 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ceaaa26-c606-4aba-95c4-837d79fac0b8-kube-api-access-4v4rm" (OuterVolumeSpecName: "kube-api-access-4v4rm") pod "7ceaaa26-c606-4aba-95c4-837d79fac0b8" (UID: "7ceaaa26-c606-4aba-95c4-837d79fac0b8"). InnerVolumeSpecName "kube-api-access-4v4rm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.448352 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7ceaaa26-c606-4aba-95c4-837d79fac0b8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.448384 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4v4rm\" (UniqueName: \"kubernetes.io/projected/7ceaaa26-c606-4aba-95c4-837d79fac0b8-kube-api-access-4v4rm\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.512322 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-m2cm2" event={"ID":"9e1035b6-a207-4ac0-bee6-2ce590e2101e","Type":"ContainerStarted","Data":"5f435f55b5665028d0a15f6f65d0ffa389825b74adcd857adc7f6044a836ea3e"} Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.555648 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-d829-account-create-update-87s9q" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.556786 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-d829-account-create-update-87s9q" event={"ID":"7ceaaa26-c606-4aba-95c4-837d79fac0b8","Type":"ContainerDied","Data":"632304badd88598afaa22d84b0c3fc0d5cb189912c181d033c2f2ee2f9e31bb3"} Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.556827 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="632304badd88598afaa22d84b0c3fc0d5cb189912c181d033c2f2ee2f9e31bb3" Dec 05 17:33:20 crc kubenswrapper[4753]: I1205 17:33:20.678910 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-a894-account-create-update-rg285"] Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.132980 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"] Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.133763 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="prometheus" containerID="cri-o://049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477" gracePeriod=600 Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.133851 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="config-reloader" containerID="cri-o://7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb" gracePeriod=600 Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.135026 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/prometheus-metric-storage-0" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="thanos-sidecar" containerID="cri-o://266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db" gracePeriod=600 Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.377591 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-1e4c-account-create-update-5s55z" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.384342 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-58dh4" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.434448 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-ks6km" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.447051 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-affb-account-create-update-m555z" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.460237 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-k7h4r" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.498982 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-xbccd"] Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.506802 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbpvt\" (UniqueName: \"kubernetes.io/projected/5af8977e-c995-4b12-a427-c4223f563be6-kube-api-access-hbpvt\") pod \"5af8977e-c995-4b12-a427-c4223f563be6\" (UID: \"5af8977e-c995-4b12-a427-c4223f563be6\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.506922 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r228z\" (UniqueName: \"kubernetes.io/projected/dc2dad3f-2686-4fdc-8686-aeed53073cd2-kube-api-access-r228z\") pod \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\" (UID: \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.506954 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-operator-scripts\") pod \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\" (UID: \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.507039 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdkts\" (UniqueName: \"kubernetes.io/projected/842c7431-4ec7-4dae-98ea-c1d6482295f9-kube-api-access-gdkts\") pod \"842c7431-4ec7-4dae-98ea-c1d6482295f9\" (UID: \"842c7431-4ec7-4dae-98ea-c1d6482295f9\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.507068 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hrv5h\" (UniqueName: \"kubernetes.io/projected/afe6484a-49e0-4297-b722-3ad1eb80f936-kube-api-access-hrv5h\") pod \"afe6484a-49e0-4297-b722-3ad1eb80f936\" (UID: \"afe6484a-49e0-4297-b722-3ad1eb80f936\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.507106 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afe6484a-49e0-4297-b722-3ad1eb80f936-operator-scripts\") pod \"afe6484a-49e0-4297-b722-3ad1eb80f936\" (UID: \"afe6484a-49e0-4297-b722-3ad1eb80f936\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.507136 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842c7431-4ec7-4dae-98ea-c1d6482295f9-operator-scripts\") pod \"842c7431-4ec7-4dae-98ea-c1d6482295f9\" (UID: \"842c7431-4ec7-4dae-98ea-c1d6482295f9\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.507247 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5af8977e-c995-4b12-a427-c4223f563be6-operator-scripts\") pod \"5af8977e-c995-4b12-a427-c4223f563be6\" (UID: \"5af8977e-c995-4b12-a427-c4223f563be6\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.507340 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnm4g\" (UniqueName: \"kubernetes.io/projected/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-kube-api-access-qnm4g\") pod \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\" (UID: \"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 
17:33:21.507367 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2dad3f-2686-4fdc-8686-aeed53073cd2-operator-scripts\") pod \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\" (UID: \"dc2dad3f-2686-4fdc-8686-aeed53073cd2\") " Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.510107 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dc2dad3f-2686-4fdc-8686-aeed53073cd2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "dc2dad3f-2686-4fdc-8686-aeed53073cd2" (UID: "dc2dad3f-2686-4fdc-8686-aeed53073cd2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.510713 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afe6484a-49e0-4297-b722-3ad1eb80f936-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "afe6484a-49e0-4297-b722-3ad1eb80f936" (UID: "afe6484a-49e0-4297-b722-3ad1eb80f936"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.511075 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1" (UID: "1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.513020 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5af8977e-c995-4b12-a427-c4223f563be6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5af8977e-c995-4b12-a427-c4223f563be6" (UID: "5af8977e-c995-4b12-a427-c4223f563be6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.515134 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/842c7431-4ec7-4dae-98ea-c1d6482295f9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "842c7431-4ec7-4dae-98ea-c1d6482295f9" (UID: "842c7431-4ec7-4dae-98ea-c1d6482295f9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.516336 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afe6484a-49e0-4297-b722-3ad1eb80f936-kube-api-access-hrv5h" (OuterVolumeSpecName: "kube-api-access-hrv5h") pod "afe6484a-49e0-4297-b722-3ad1eb80f936" (UID: "afe6484a-49e0-4297-b722-3ad1eb80f936"). InnerVolumeSpecName "kube-api-access-hrv5h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.520616 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/842c7431-4ec7-4dae-98ea-c1d6482295f9-kube-api-access-gdkts" (OuterVolumeSpecName: "kube-api-access-gdkts") pod "842c7431-4ec7-4dae-98ea-c1d6482295f9" (UID: "842c7431-4ec7-4dae-98ea-c1d6482295f9"). InnerVolumeSpecName "kube-api-access-gdkts". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.522044 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-kube-api-access-qnm4g" (OuterVolumeSpecName: "kube-api-access-qnm4g") pod "1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1" (UID: "1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1"). InnerVolumeSpecName "kube-api-access-qnm4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.522219 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5af8977e-c995-4b12-a427-c4223f563be6-kube-api-access-hbpvt" (OuterVolumeSpecName: "kube-api-access-hbpvt") pod "5af8977e-c995-4b12-a427-c4223f563be6" (UID: "5af8977e-c995-4b12-a427-c4223f563be6"). InnerVolumeSpecName "kube-api-access-hbpvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.535104 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dc2dad3f-2686-4fdc-8686-aeed53073cd2-kube-api-access-r228z" (OuterVolumeSpecName: "kube-api-access-r228z") pod "dc2dad3f-2686-4fdc-8686-aeed53073cd2" (UID: "dc2dad3f-2686-4fdc-8686-aeed53073cd2"). InnerVolumeSpecName "kube-api-access-r228z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.555971 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-a206-account-create-update-999f6"] Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.575557 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-96mhr" event={"ID":"eb4d18c1-5c7a-49e1-8662-29758295058b","Type":"ContainerDied","Data":"d5a6640ded8a752d98fe57426bbadee987ec34e766f4d3b246b3253a11fa8139"} Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.575607 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d5a6640ded8a752d98fe57426bbadee987ec34e766f4d3b246b3253a11fa8139" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.579867 4753 generic.go:334] "Generic (PLEG): container finished" podID="9e1035b6-a207-4ac0-bee6-2ce590e2101e" containerID="7fd88d83f0f6c452714e7196aa44e0490ec2d551ca776d088b47c2d228a12ab8" exitCode=0 Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.579925 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-m2cm2" event={"ID":"9e1035b6-a207-4ac0-bee6-2ce590e2101e","Type":"ContainerDied","Data":"7fd88d83f0f6c452714e7196aa44e0490ec2d551ca776d088b47c2d228a12ab8"} Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.581474 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-1e4c-account-create-update-5s55z" event={"ID":"5af8977e-c995-4b12-a427-c4223f563be6","Type":"ContainerDied","Data":"c1e6a03c894742282798bb9ac61f1ba31147b869bcbe8706f8249b158e04bd85"} Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.581497 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c1e6a03c894742282798bb9ac61f1ba31147b869bcbe8706f8249b158e04bd85" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.581532 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-1e4c-account-create-update-5s55z" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.583242 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-affb-account-create-update-m555z" event={"ID":"1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1","Type":"ContainerDied","Data":"adc1fcf906920a93bdb17848b9fa5a4e5c24dfc82e6845440cf44c106e8e65c3"} Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.583268 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adc1fcf906920a93bdb17848b9fa5a4e5c24dfc82e6845440cf44c106e8e65c3" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.583269 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-affb-account-create-update-m555z" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.596434 4753 generic.go:334] "Generic (PLEG): container finished" podID="821491b7-948f-4be8-b01f-90168f29ab11" containerID="80f6493520b041b792a9da767c061027d7f87b30a886b98911da63d6e47ede9d" exitCode=0 Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.596521 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-a894-account-create-update-rg285" event={"ID":"821491b7-948f-4be8-b01f-90168f29ab11","Type":"ContainerDied","Data":"80f6493520b041b792a9da767c061027d7f87b30a886b98911da63d6e47ede9d"} Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.596545 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-a894-account-create-update-rg285" event={"ID":"821491b7-948f-4be8-b01f-90168f29ab11","Type":"ContainerStarted","Data":"83a8612e5141e7f049df1de270af642916b7c47f2bacb7206d1fe64260a85f1e"} Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.598630 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-58dh4" event={"ID":"842c7431-4ec7-4dae-98ea-c1d6482295f9","Type":"ContainerDied","Data":"9fe69f6cab638ed75d15ba6ab3130ca5c1d9091e546b385708e966c9604cee71"} Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.598655 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9fe69f6cab638ed75d15ba6ab3130ca5c1d9091e546b385708e966c9604cee71" Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.598694 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-58dh4"
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.602244 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-443d-account-create-update-fzttf" event={"ID":"00d86d0d-6896-435e-808f-eec9c8225f99","Type":"ContainerDied","Data":"2c37b039ede04a74c28a7b3b237b8ffc9042ad09c0aa4c14d4a4581c2a6c11af"}
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.602280 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c37b039ede04a74c28a7b3b237b8ffc9042ad09c0aa4c14d4a4581c2a6c11af"
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.605931 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-create-ks6km" event={"ID":"dc2dad3f-2686-4fdc-8686-aeed53073cd2","Type":"ContainerDied","Data":"80bde15ff616600befd893df742612c121ca9508630561143e51374d5d069382"}
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.605954 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80bde15ff616600befd893df742612c121ca9508630561143e51374d5d069382"
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.606130 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-create-ks6km"
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.607532 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-xbccd" event={"ID":"0ecd357b-ae3c-42f0-a90b-f09e00af942a","Type":"ContainerStarted","Data":"8e6a584cbcdb011025ea15d3063596898bd3b98a307d91eb4934ab13a44ff707"}
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613377 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/842c7431-4ec7-4dae-98ea-c1d6482295f9-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613562 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5af8977e-c995-4b12-a427-c4223f563be6-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613575 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnm4g\" (UniqueName: \"kubernetes.io/projected/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-kube-api-access-qnm4g\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613586 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/dc2dad3f-2686-4fdc-8686-aeed53073cd2-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613595 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbpvt\" (UniqueName: \"kubernetes.io/projected/5af8977e-c995-4b12-a427-c4223f563be6-kube-api-access-hbpvt\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613603 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r228z\" (UniqueName: \"kubernetes.io/projected/dc2dad3f-2686-4fdc-8686-aeed53073cd2-kube-api-access-r228z\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613627 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613635 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdkts\" (UniqueName: \"kubernetes.io/projected/842c7431-4ec7-4dae-98ea-c1d6482295f9-kube-api-access-gdkts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613646 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hrv5h\" (UniqueName: \"kubernetes.io/projected/afe6484a-49e0-4297-b722-3ad1eb80f936-kube-api-access-hrv5h\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.613656 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/afe6484a-49e0-4297-b722-3ad1eb80f936-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.617704 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-k7h4r"
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.617724 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-k7h4r" event={"ID":"afe6484a-49e0-4297-b722-3ad1eb80f936","Type":"ContainerDied","Data":"69b8c495df79566f34ea8949db87206db2d6e47bdf75de922ccfa899ad77e20f"}
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.617756 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69b8c495df79566f34ea8949db87206db2d6e47bdf75de922ccfa899ad77e20f"
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.622607 4753 generic.go:334] "Generic (PLEG): container finished" podID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerID="266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db" exitCode=0
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.622626 4753 generic.go:334] "Generic (PLEG): container finished" podID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerID="049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477" exitCode=0
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.622643 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c44e1a93-e233-46a2-b18a-e6c8c396a394","Type":"ContainerDied","Data":"266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db"}
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.622661 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c44e1a93-e233-46a2-b18a-e6c8c396a394","Type":"ContainerDied","Data":"049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477"}
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.622770 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.667020 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.731407 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00d86d0d-6896-435e-808f-eec9c8225f99-operator-scripts\") pod \"00d86d0d-6896-435e-808f-eec9c8225f99\" (UID: \"00d86d0d-6896-435e-808f-eec9c8225f99\") "
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.732311 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb4d18c1-5c7a-49e1-8662-29758295058b-operator-scripts\") pod \"eb4d18c1-5c7a-49e1-8662-29758295058b\" (UID: \"eb4d18c1-5c7a-49e1-8662-29758295058b\") "
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.732380 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qf2nt\" (UniqueName: \"kubernetes.io/projected/00d86d0d-6896-435e-808f-eec9c8225f99-kube-api-access-qf2nt\") pod \"00d86d0d-6896-435e-808f-eec9c8225f99\" (UID: \"00d86d0d-6896-435e-808f-eec9c8225f99\") "
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.732582 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ch2t\" (UniqueName: \"kubernetes.io/projected/eb4d18c1-5c7a-49e1-8662-29758295058b-kube-api-access-9ch2t\") pod \"eb4d18c1-5c7a-49e1-8662-29758295058b\" (UID: \"eb4d18c1-5c7a-49e1-8662-29758295058b\") "
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.735258 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00d86d0d-6896-435e-808f-eec9c8225f99-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "00d86d0d-6896-435e-808f-eec9c8225f99" (UID: "00d86d0d-6896-435e-808f-eec9c8225f99"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.740283 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eb4d18c1-5c7a-49e1-8662-29758295058b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "eb4d18c1-5c7a-49e1-8662-29758295058b" (UID: "eb4d18c1-5c7a-49e1-8662-29758295058b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.749378 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/00d86d0d-6896-435e-808f-eec9c8225f99-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.749416 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/eb4d18c1-5c7a-49e1-8662-29758295058b-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.750426 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00d86d0d-6896-435e-808f-eec9c8225f99-kube-api-access-qf2nt" (OuterVolumeSpecName: "kube-api-access-qf2nt") pod "00d86d0d-6896-435e-808f-eec9c8225f99" (UID: "00d86d0d-6896-435e-808f-eec9c8225f99"). InnerVolumeSpecName "kube-api-access-qf2nt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.796823 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb4d18c1-5c7a-49e1-8662-29758295058b-kube-api-access-9ch2t" (OuterVolumeSpecName: "kube-api-access-9ch2t") pod "eb4d18c1-5c7a-49e1-8662-29758295058b" (UID: "eb4d18c1-5c7a-49e1-8662-29758295058b"). InnerVolumeSpecName "kube-api-access-9ch2t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.850664 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qf2nt\" (UniqueName: \"kubernetes.io/projected/00d86d0d-6896-435e-808f-eec9c8225f99-kube-api-access-qf2nt\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.850698 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ch2t\" (UniqueName: \"kubernetes.io/projected/eb4d18c1-5c7a-49e1-8662-29758295058b-kube-api-access-9ch2t\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:21 crc kubenswrapper[4753]: I1205 17:33:21.999251 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-pz8kp"]
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.068513 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-313f-account-create-update-kbxsm"]
Dec 05 17:33:22 crc kubenswrapper[4753]: W1205 17:33:22.122129 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5bd95044_6b92_43ac_8bab_05b16590530c.slice/crio-0445e4ecee9c588235b681f4f4df101daeb5b9a8d2eba3560a66f5ae232aa930 WatchSource:0}: Error finding container 0445e4ecee9c588235b681f4f4df101daeb5b9a8d2eba3560a66f5ae232aa930: Status 404 returned error can't find the container with id 0445e4ecee9c588235b681f4f4df101daeb5b9a8d2eba3560a66f5ae232aa930
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.134537 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.272386 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.359944 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-config\") pod \"c44e1a93-e233-46a2-b18a-e6c8c396a394\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") "
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.359999 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c44e1a93-e233-46a2-b18a-e6c8c396a394-prometheus-metric-storage-rulefiles-0\") pod \"c44e1a93-e233-46a2-b18a-e6c8c396a394\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") "
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.360098 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-thanos-prometheus-http-client-file\") pod \"c44e1a93-e233-46a2-b18a-e6c8c396a394\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") "
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.360191 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c44e1a93-e233-46a2-b18a-e6c8c396a394-config-out\") pod \"c44e1a93-e233-46a2-b18a-e6c8c396a394\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") "
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.361770 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c44e1a93-e233-46a2-b18a-e6c8c396a394-prometheus-metric-storage-rulefiles-0" (OuterVolumeSpecName: "prometheus-metric-storage-rulefiles-0") pod "c44e1a93-e233-46a2-b18a-e6c8c396a394" (UID: "c44e1a93-e233-46a2-b18a-e6c8c396a394"). InnerVolumeSpecName "prometheus-metric-storage-rulefiles-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.362769 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"prometheus-metric-storage-db\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") pod \"c44e1a93-e233-46a2-b18a-e6c8c396a394\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") "
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.362817 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r654f\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-kube-api-access-r654f\") pod \"c44e1a93-e233-46a2-b18a-e6c8c396a394\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") "
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.362927 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-tls-assets\") pod \"c44e1a93-e233-46a2-b18a-e6c8c396a394\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") "
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.362963 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-web-config\") pod \"c44e1a93-e233-46a2-b18a-e6c8c396a394\" (UID: \"c44e1a93-e233-46a2-b18a-e6c8c396a394\") "
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.363585 4753 reconciler_common.go:293] "Volume detached for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/c44e1a93-e233-46a2-b18a-e6c8c396a394-prometheus-metric-storage-rulefiles-0\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.365506 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c44e1a93-e233-46a2-b18a-e6c8c396a394-config-out" (OuterVolumeSpecName: "config-out") pod "c44e1a93-e233-46a2-b18a-e6c8c396a394" (UID: "c44e1a93-e233-46a2-b18a-e6c8c396a394"). InnerVolumeSpecName "config-out". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.369743 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-tls-assets" (OuterVolumeSpecName: "tls-assets") pod "c44e1a93-e233-46a2-b18a-e6c8c396a394" (UID: "c44e1a93-e233-46a2-b18a-e6c8c396a394"). InnerVolumeSpecName "tls-assets". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.370043 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-thanos-prometheus-http-client-file" (OuterVolumeSpecName: "thanos-prometheus-http-client-file") pod "c44e1a93-e233-46a2-b18a-e6c8c396a394" (UID: "c44e1a93-e233-46a2-b18a-e6c8c396a394"). InnerVolumeSpecName "thanos-prometheus-http-client-file". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.370448 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-kube-api-access-r654f" (OuterVolumeSpecName: "kube-api-access-r654f") pod "c44e1a93-e233-46a2-b18a-e6c8c396a394" (UID: "c44e1a93-e233-46a2-b18a-e6c8c396a394"). InnerVolumeSpecName "kube-api-access-r654f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.376407 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-config" (OuterVolumeSpecName: "config") pod "c44e1a93-e233-46a2-b18a-e6c8c396a394" (UID: "c44e1a93-e233-46a2-b18a-e6c8c396a394"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.403210 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-web-config" (OuterVolumeSpecName: "web-config") pod "c44e1a93-e233-46a2-b18a-e6c8c396a394" (UID: "c44e1a93-e233-46a2-b18a-e6c8c396a394"). InnerVolumeSpecName "web-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.407341 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318" (OuterVolumeSpecName: "prometheus-metric-storage-db") pod "c44e1a93-e233-46a2-b18a-e6c8c396a394" (UID: "c44e1a93-e233-46a2-b18a-e6c8c396a394"). InnerVolumeSpecName "pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.467478 4753 reconciler_common.go:293] "Volume detached for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-tls-assets\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.467841 4753 reconciler_common.go:293] "Volume detached for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-web-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.467924 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.468001 4753 reconciler_common.go:293] "Volume detached for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/c44e1a93-e233-46a2-b18a-e6c8c396a394-thanos-prometheus-http-client-file\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.468072 4753 reconciler_common.go:293] "Volume detached for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/c44e1a93-e233-46a2-b18a-e6c8c396a394-config-out\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.468201 4753 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") on node \"crc\" "
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.468285 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r654f\" (UniqueName: \"kubernetes.io/projected/c44e1a93-e233-46a2-b18a-e6c8c396a394-kube-api-access-r654f\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.514161 4753 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.514330 4753 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318") on node "crc"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.570286 4753 reconciler_common.go:293] "Volume detached for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.634994 4753 generic.go:334] "Generic (PLEG): container finished" podID="0ecd357b-ae3c-42f0-a90b-f09e00af942a" containerID="64d5cf9961fdc072a99ffd6d9ed8221a2d840498d028dc8aee1ce5aa58450b8f" exitCode=0
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.635054 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-xbccd" event={"ID":"0ecd357b-ae3c-42f0-a90b-f09e00af942a","Type":"ContainerDied","Data":"64d5cf9961fdc072a99ffd6d9ed8221a2d840498d028dc8aee1ce5aa58450b8f"}
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.639956 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-313f-account-create-update-kbxsm" event={"ID":"5bd95044-6b92-43ac-8bab-05b16590530c","Type":"ContainerStarted","Data":"ff0064874fe9c945f6d60ddbe6732bbda155c4702492d6c26e046e22237f9c4c"}
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.640137 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-313f-account-create-update-kbxsm" event={"ID":"5bd95044-6b92-43ac-8bab-05b16590530c","Type":"ContainerStarted","Data":"0445e4ecee9c588235b681f4f4df101daeb5b9a8d2eba3560a66f5ae232aa930"}
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.651655 4753 generic.go:334] "Generic (PLEG): container finished" podID="01bbb12f-14b3-4fdb-972e-e33615efe1a3" containerID="a6df7f2bb11ccf0cd801ab25c277addc7ae8dd240ee650b5bded159aae03f091" exitCode=0
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.651900 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pz8kp" event={"ID":"01bbb12f-14b3-4fdb-972e-e33615efe1a3","Type":"ContainerDied","Data":"a6df7f2bb11ccf0cd801ab25c277addc7ae8dd240ee650b5bded159aae03f091"}
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.651948 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pz8kp" event={"ID":"01bbb12f-14b3-4fdb-972e-e33615efe1a3","Type":"ContainerStarted","Data":"242ed380d8cd435937096d43d3f08db7e00f3f445c207fb73b49af33b0834b07"}
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.673923 4753 generic.go:334] "Generic (PLEG): container finished" podID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerID="7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb" exitCode=0
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.673989 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c44e1a93-e233-46a2-b18a-e6c8c396a394","Type":"ContainerDied","Data":"7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb"}
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.674018 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"c44e1a93-e233-46a2-b18a-e6c8c396a394","Type":"ContainerDied","Data":"bba8b5942e6a1ef3de56a177c62900be2fca913372f13f8bc77d891d60ca1f17"}
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.674034 4753 scope.go:117] "RemoveContainer" containerID="266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.674236 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.684074 4753 generic.go:334] "Generic (PLEG): container finished" podID="26f56e94-9cee-4adb-a29e-74661b598739" containerID="7f4b940c570d2aacd28aa054b78af815dd624789f6e3d88278c4363bc411f3ed" exitCode=0
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.684470 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a206-account-create-update-999f6" event={"ID":"26f56e94-9cee-4adb-a29e-74661b598739","Type":"ContainerDied","Data":"7f4b940c570d2aacd28aa054b78af815dd624789f6e3d88278c4363bc411f3ed"}
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.684513 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a206-account-create-update-999f6" event={"ID":"26f56e94-9cee-4adb-a29e-74661b598739","Type":"ContainerStarted","Data":"5373693da301d5ed381f2168e40505f78dfb0741b6122d87961ed772373d4dff"}
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.684689 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-443d-account-create-update-fzttf"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.688993 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-96mhr"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.700385 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-313f-account-create-update-kbxsm" podStartSLOduration=3.700366012 podStartE2EDuration="3.700366012s" podCreationTimestamp="2025-12-05 17:33:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:22.675174188 +0000 UTC m=+1741.178281194" watchObservedRunningTime="2025-12-05 17:33:22.700366012 +0000 UTC m=+1741.203473018"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.822032 4753 scope.go:117] "RemoveContainer" containerID="7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.846333 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.850841 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.859746 4753 scope.go:117] "RemoveContainer" containerID="049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.866966 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867486 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="init-config-reloader"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867515 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="init-config-reloader"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867610 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5af8977e-c995-4b12-a427-c4223f563be6" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867632 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="5af8977e-c995-4b12-a427-c4223f563be6" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867645 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="thanos-sidecar"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867655 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="thanos-sidecar"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867674 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4d18c1-5c7a-49e1-8662-29758295058b" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867683 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4d18c1-5c7a-49e1-8662-29758295058b" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867710 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="config-reloader"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867718 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="config-reloader"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867733 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867743 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867757 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="842c7431-4ec7-4dae-98ea-c1d6482295f9" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867766 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="842c7431-4ec7-4dae-98ea-c1d6482295f9" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867777 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dc2dad3f-2686-4fdc-8686-aeed53073cd2" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867787 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="dc2dad3f-2686-4fdc-8686-aeed53073cd2" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867806 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ceaaa26-c606-4aba-95c4-837d79fac0b8" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867815 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ceaaa26-c606-4aba-95c4-837d79fac0b8" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867828 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="prometheus"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867835 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="prometheus"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867846 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00d86d0d-6896-435e-808f-eec9c8225f99" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867853 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="00d86d0d-6896-435e-808f-eec9c8225f99" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.867861 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe6484a-49e0-4297-b722-3ad1eb80f936" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.867868 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe6484a-49e0-4297-b722-3ad1eb80f936" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868064 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="afe6484a-49e0-4297-b722-3ad1eb80f936" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868080 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ceaaa26-c606-4aba-95c4-837d79fac0b8" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868097 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="prometheus"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868109 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="00d86d0d-6896-435e-808f-eec9c8225f99" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868116 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="5af8977e-c995-4b12-a427-c4223f563be6" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868123 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1" containerName="mariadb-account-create-update"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868134 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="thanos-sidecar"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868227 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="dc2dad3f-2686-4fdc-8686-aeed53073cd2" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868240 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb4d18c1-5c7a-49e1-8662-29758295058b" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868255 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="config-reloader"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.868262 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="842c7431-4ec7-4dae-98ea-c1d6482295f9" containerName="mariadb-database-create"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.870009 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.873079 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-metric-storage-prometheus-svc"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.873184 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-web-config"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.873244 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"metric-storage-prometheus-dockercfg-44qtk"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.873274 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"prometheus-metric-storage-rulefiles-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.873089 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.873530 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-thanos-prometheus-http-client-file"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.900345 4753 scope.go:117] "RemoveContainer" containerID="3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.911314 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"prometheus-metric-storage-tls-assets-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.917496 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.933173 4753 scope.go:117] "RemoveContainer" containerID="266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.933617 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db\": container with ID starting with 266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db not found: ID does not exist" containerID="266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.933687 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db"} err="failed to get container status \"266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db\": rpc error: code = NotFound desc = could not find container \"266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db\": container with ID starting with 266a9429f9ae2d3556d9ee339e91cfca6aa2516444e3c92a995ef68403d280db not found: ID does not exist"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.933734 4753 scope.go:117] "RemoveContainer" containerID="7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.934039 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb\": container with ID starting with 7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb not found: ID does not exist" containerID="7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.934087 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb"} err="failed to get container status \"7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb\": rpc error: code = NotFound desc = could not find container \"7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb\": container with ID starting with 7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb not found: ID does not exist"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.934115 4753 scope.go:117] "RemoveContainer" containerID="049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.934465 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477\": container with ID starting with 049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477 not found: ID does not exist" containerID="049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.934506 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477"} err="failed to get container status \"049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477\": rpc error: code = NotFound desc = could not find container \"049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477\": container with ID starting with 049f8aa25e2c75d1e632b63ae6779813096ab36c26a5352edb4719110afbe477 not found: ID does not exist"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.934534 4753 scope.go:117] "RemoveContainer" containerID="3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d"
Dec 05 17:33:22 crc kubenswrapper[4753]: E1205 17:33:22.934757 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d\": container with ID starting with 3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d not found: ID does not exist" containerID="3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.934783 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d"} err="failed to get container status \"3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d\": rpc error: code = NotFound desc = could not find container \"3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d\": container with ID starting with 3bb4573a77f9c3cffe68adc2ad6afa1507ba3d026176ddbf48822e35ce50e42d not found: ID does not exist"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976606 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjxzs\" (UniqueName: \"kubernetes.io/projected/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-kube-api-access-mjxzs\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976675 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-config\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976738 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976800 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976845 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976869 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976898 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976916 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976939 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.976966 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:22 crc kubenswrapper[4753]: I1205 17:33:22.977258 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080229 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080293 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080339 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080364 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080399 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080432 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080481 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080547 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjxzs\" (UniqueName: \"kubernetes.io/projected/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-kube-api-access-mjxzs\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080578 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-config\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080607 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.080659 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.085859 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-metric-storage-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-prometheus-metric-storage-rulefiles-0\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.087803 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-config-out\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.090578 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"thanos-prometheus-http-client-file\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-thanos-prometheus-http-client-file\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.092696 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-web-config-tls-secret-key-cert-metric-storage-promethe-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.094667 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.094693 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/041b3d1c919a392dd8ecbf05ce919b761f2a980688ec2105618d552148562637/globalmount\"" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.095434 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-tls-assets\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.096077 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-web-config-tls-secret-cert-cert-metric-storage-prometh-dc638c2d\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.096562 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-web-config\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.102337 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-secret-combined-ca-bundle\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.109651 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-config\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.116232 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mjxzs\" (UniqueName: \"kubernetes.io/projected/b4a8b3a8-c966-41e1-bb1e-a054e3c3e189-kube-api-access-mjxzs\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.173346 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-f9bf6d09-4f3d-42cd-98e3-4107b8942318\") pod \"prometheus-metric-storage-0\" (UID: \"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189\") " pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.209636 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/prometheus-metric-storage-0"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.264028 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-m2cm2"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.283696 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a894-account-create-update-rg285"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.385697 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bw5qn\" (UniqueName: \"kubernetes.io/projected/821491b7-948f-4be8-b01f-90168f29ab11-kube-api-access-bw5qn\") pod \"821491b7-948f-4be8-b01f-90168f29ab11\" (UID: \"821491b7-948f-4be8-b01f-90168f29ab11\") "
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.385807 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4n6dx\" (UniqueName: \"kubernetes.io/projected/9e1035b6-a207-4ac0-bee6-2ce590e2101e-kube-api-access-4n6dx\") pod \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\" (UID: \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\") "
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.386027 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/821491b7-948f-4be8-b01f-90168f29ab11-operator-scripts\") pod \"821491b7-948f-4be8-b01f-90168f29ab11\" (UID: \"821491b7-948f-4be8-b01f-90168f29ab11\") "
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.386056 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1035b6-a207-4ac0-bee6-2ce590e2101e-operator-scripts\") pod \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\" (UID: \"9e1035b6-a207-4ac0-bee6-2ce590e2101e\") "
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.387101 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e1035b6-a207-4ac0-bee6-2ce590e2101e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9e1035b6-a207-4ac0-bee6-2ce590e2101e" (UID: "9e1035b6-a207-4ac0-bee6-2ce590e2101e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.392589 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/821491b7-948f-4be8-b01f-90168f29ab11-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "821491b7-948f-4be8-b01f-90168f29ab11" (UID: "821491b7-948f-4be8-b01f-90168f29ab11"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.393399 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/821491b7-948f-4be8-b01f-90168f29ab11-kube-api-access-bw5qn" (OuterVolumeSpecName: "kube-api-access-bw5qn") pod "821491b7-948f-4be8-b01f-90168f29ab11" (UID: "821491b7-948f-4be8-b01f-90168f29ab11"). InnerVolumeSpecName "kube-api-access-bw5qn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.403402 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e1035b6-a207-4ac0-bee6-2ce590e2101e-kube-api-access-4n6dx" (OuterVolumeSpecName: "kube-api-access-4n6dx") pod "9e1035b6-a207-4ac0-bee6-2ce590e2101e" (UID: "9e1035b6-a207-4ac0-bee6-2ce590e2101e"). InnerVolumeSpecName "kube-api-access-4n6dx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.489524 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bw5qn\" (UniqueName: \"kubernetes.io/projected/821491b7-948f-4be8-b01f-90168f29ab11-kube-api-access-bw5qn\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.489562 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4n6dx\" (UniqueName: \"kubernetes.io/projected/9e1035b6-a207-4ac0-bee6-2ce590e2101e-kube-api-access-4n6dx\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.489574 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/821491b7-948f-4be8-b01f-90168f29ab11-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.489584 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9e1035b6-a207-4ac0-bee6-2ce590e2101e-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.693624 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-m2cm2" event={"ID":"9e1035b6-a207-4ac0-bee6-2ce590e2101e","Type":"ContainerDied","Data":"5f435f55b5665028d0a15f6f65d0ffa389825b74adcd857adc7f6044a836ea3e"}
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.693660 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5f435f55b5665028d0a15f6f65d0ffa389825b74adcd857adc7f6044a836ea3e"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.693725 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-m2cm2"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.701220 4753 generic.go:334] "Generic (PLEG): container finished" podID="5bd95044-6b92-43ac-8bab-05b16590530c" containerID="ff0064874fe9c945f6d60ddbe6732bbda155c4702492d6c26e046e22237f9c4c" exitCode=0
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.701430 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-313f-account-create-update-kbxsm" event={"ID":"5bd95044-6b92-43ac-8bab-05b16590530c","Type":"ContainerDied","Data":"ff0064874fe9c945f6d60ddbe6732bbda155c4702492d6c26e046e22237f9c4c"}
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.705989 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-a894-account-create-update-rg285" event={"ID":"821491b7-948f-4be8-b01f-90168f29ab11","Type":"ContainerDied","Data":"83a8612e5141e7f049df1de270af642916b7c47f2bacb7206d1fe64260a85f1e"}
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.706032 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83a8612e5141e7f049df1de270af642916b7c47f2bacb7206d1fe64260a85f1e"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.706104 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-a894-account-create-update-rg285"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.751521 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" path="/var/lib/kubelet/pods/c44e1a93-e233-46a2-b18a-e6c8c396a394/volumes"
Dec 05 17:33:23 crc kubenswrapper[4753]: I1205 17:33:23.915740 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/prometheus-metric-storage-0"]
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.100770 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0"
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.105389 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/f4707e97-4f70-42d5-959e-1d2c8a9629e5-etc-swift\") pod \"swift-storage-0\" (UID: \"f4707e97-4f70-42d5-959e-1d2c8a9629e5\") " pod="openstack/swift-storage-0"
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.155029 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-xbccd"
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.234594 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pz8kp"
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.255520 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.266690 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a206-account-create-update-999f6"
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.311690 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0ecd357b-ae3c-42f0-a90b-f09e00af942a-operator-scripts\") pod \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\" (UID: \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\") "
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.311761 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m4jw7\" (UniqueName: \"kubernetes.io/projected/0ecd357b-ae3c-42f0-a90b-f09e00af942a-kube-api-access-m4jw7\") pod \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\" (UID: \"0ecd357b-ae3c-42f0-a90b-f09e00af942a\") "
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.311804 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7vcht\" (UniqueName: \"kubernetes.io/projected/01bbb12f-14b3-4fdb-972e-e33615efe1a3-kube-api-access-7vcht\") pod \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\" (UID: \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\") "
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.311868 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01bbb12f-14b3-4fdb-972e-e33615efe1a3-operator-scripts\") pod \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\" (UID: \"01bbb12f-14b3-4fdb-972e-e33615efe1a3\") "
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.314591 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01bbb12f-14b3-4fdb-972e-e33615efe1a3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "01bbb12f-14b3-4fdb-972e-e33615efe1a3" (UID: "01bbb12f-14b3-4fdb-972e-e33615efe1a3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.314999 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0ecd357b-ae3c-42f0-a90b-f09e00af942a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0ecd357b-ae3c-42f0-a90b-f09e00af942a" (UID: "0ecd357b-ae3c-42f0-a90b-f09e00af942a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.321105 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ecd357b-ae3c-42f0-a90b-f09e00af942a-kube-api-access-m4jw7" (OuterVolumeSpecName: "kube-api-access-m4jw7") pod "0ecd357b-ae3c-42f0-a90b-f09e00af942a" (UID: "0ecd357b-ae3c-42f0-a90b-f09e00af942a"). InnerVolumeSpecName "kube-api-access-m4jw7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.321487 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01bbb12f-14b3-4fdb-972e-e33615efe1a3-kube-api-access-7vcht" (OuterVolumeSpecName: "kube-api-access-7vcht") pod "01bbb12f-14b3-4fdb-972e-e33615efe1a3" (UID: "01bbb12f-14b3-4fdb-972e-e33615efe1a3"). InnerVolumeSpecName "kube-api-access-7vcht". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.352534 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="227cc7e4-602f-4c1e-afa7-0e106d3f505f" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.413426 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26f56e94-9cee-4adb-a29e-74661b598739-operator-scripts\") pod \"26f56e94-9cee-4adb-a29e-74661b598739\" (UID: \"26f56e94-9cee-4adb-a29e-74661b598739\") "
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.414187 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwsz\" (UniqueName: \"kubernetes.io/projected/26f56e94-9cee-4adb-a29e-74661b598739-kube-api-access-jkwsz\") pod \"26f56e94-9cee-4adb-a29e-74661b598739\" (UID: \"26f56e94-9cee-4adb-a29e-74661b598739\") "
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.414700 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26f56e94-9cee-4adb-a29e-74661b598739-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "26f56e94-9cee-4adb-a29e-74661b598739" (UID: "26f56e94-9cee-4adb-a29e-74661b598739"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.414733 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/01bbb12f-14b3-4fdb-972e-e33615efe1a3-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.414747 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0ecd357b-ae3c-42f0-a90b-f09e00af942a-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.414758 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m4jw7\" (UniqueName: \"kubernetes.io/projected/0ecd357b-ae3c-42f0-a90b-f09e00af942a-kube-api-access-m4jw7\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.414821 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7vcht\" (UniqueName: \"kubernetes.io/projected/01bbb12f-14b3-4fdb-972e-e33615efe1a3-kube-api-access-7vcht\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.420514 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26f56e94-9cee-4adb-a29e-74661b598739-kube-api-access-jkwsz" (OuterVolumeSpecName: "kube-api-access-jkwsz") pod "26f56e94-9cee-4adb-a29e-74661b598739" (UID: "26f56e94-9cee-4adb-a29e-74661b598739"). InnerVolumeSpecName "kube-api-access-jkwsz".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.517269 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26f56e94-9cee-4adb-a29e-74661b598739-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.517311 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwsz\" (UniqueName: \"kubernetes.io/projected/26f56e94-9cee-4adb-a29e-74661b598739-kube-api-access-jkwsz\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.721631 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-pz8kp" event={"ID":"01bbb12f-14b3-4fdb-972e-e33615efe1a3","Type":"ContainerDied","Data":"242ed380d8cd435937096d43d3f08db7e00f3f445c207fb73b49af33b0834b07"} Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.721674 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="242ed380d8cd435937096d43d3f08db7e00f3f445c207fb73b49af33b0834b07" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.721682 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-pz8kp" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.723572 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-a206-account-create-update-999f6" event={"ID":"26f56e94-9cee-4adb-a29e-74661b598739","Type":"ContainerDied","Data":"5373693da301d5ed381f2168e40505f78dfb0741b6122d87961ed772373d4dff"} Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.723611 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5373693da301d5ed381f2168e40505f78dfb0741b6122d87961ed772373d4dff" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.723681 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-a206-account-create-update-999f6" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.726637 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189","Type":"ContainerStarted","Data":"0e3a03d2ab44ea8e8f6028e520f2b84dc6e54cb69368ac125bacba5b0efa03a2"} Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.728605 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-xbccd" event={"ID":"0ecd357b-ae3c-42f0-a90b-f09e00af942a","Type":"ContainerDied","Data":"8e6a584cbcdb011025ea15d3063596898bd3b98a307d91eb4934ab13a44ff707"} Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.728633 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e6a584cbcdb011025ea15d3063596898bd3b98a307d91eb4934ab13a44ff707" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.728659 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-xbccd" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.804672 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-qhphp" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.808028 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-qhphp" Dec 05 17:33:24 crc kubenswrapper[4753]: I1205 17:33:24.935773 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 17:33:24 crc kubenswrapper[4753]: W1205 17:33:24.942492 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf4707e97_4f70_42d5_959e_1d2c8a9629e5.slice/crio-3d84ebaa92dda4938444dbe66351dc70145ad06833949a0a74f258ea3d54cb14 WatchSource:0}: Error finding container 3d84ebaa92dda4938444dbe66351dc70145ad06833949a0a74f258ea3d54cb14: Status 404 returned error can't find the container with id 3d84ebaa92dda4938444dbe66351dc70145ad06833949a0a74f258ea3d54cb14 Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.060889 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-8m7cw-config-xrzkr"] Dec 05 17:33:25 crc kubenswrapper[4753]: E1205 17:33:25.061763 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01bbb12f-14b3-4fdb-972e-e33615efe1a3" containerName="mariadb-database-create" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.061789 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="01bbb12f-14b3-4fdb-972e-e33615efe1a3" containerName="mariadb-database-create" Dec 05 17:33:25 crc kubenswrapper[4753]: E1205 17:33:25.061807 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e1035b6-a207-4ac0-bee6-2ce590e2101e" containerName="mariadb-database-create" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.061817 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e1035b6-a207-4ac0-bee6-2ce590e2101e" containerName="mariadb-database-create" Dec 05 17:33:25 crc kubenswrapper[4753]: E1205 17:33:25.061852 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="821491b7-948f-4be8-b01f-90168f29ab11" containerName="mariadb-account-create-update" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.061860 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="821491b7-948f-4be8-b01f-90168f29ab11" containerName="mariadb-account-create-update" Dec 05 17:33:25 crc kubenswrapper[4753]: E1205 17:33:25.061877 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ecd357b-ae3c-42f0-a90b-f09e00af942a" containerName="mariadb-database-create" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.061884 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ecd357b-ae3c-42f0-a90b-f09e00af942a" containerName="mariadb-database-create" Dec 05 17:33:25 crc kubenswrapper[4753]: E1205 17:33:25.061898 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26f56e94-9cee-4adb-a29e-74661b598739" containerName="mariadb-account-create-update" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.061906 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="26f56e94-9cee-4adb-a29e-74661b598739" containerName="mariadb-account-create-update" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.062118 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e1035b6-a207-4ac0-bee6-2ce590e2101e" 
containerName="mariadb-database-create" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.062138 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="01bbb12f-14b3-4fdb-972e-e33615efe1a3" containerName="mariadb-database-create" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.062173 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="26f56e94-9cee-4adb-a29e-74661b598739" containerName="mariadb-account-create-update" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.062188 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ecd357b-ae3c-42f0-a90b-f09e00af942a" containerName="mariadb-database-create" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.062207 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="821491b7-948f-4be8-b01f-90168f29ab11" containerName="mariadb-account-create-update" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.063098 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.066180 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.081822 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8m7cw-config-xrzkr"] Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.138245 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjfp2\" (UniqueName: \"kubernetes.io/projected/f951edf4-2840-49d4-89b3-5c3ba221257d-kube-api-access-sjfp2\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.138324 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.138387 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-log-ovn\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.138447 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-additional-scripts\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.138499 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-scripts\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 
17:33:25.138521 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run-ovn\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.238510 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.239683 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjfp2\" (UniqueName: \"kubernetes.io/projected/f951edf4-2840-49d4-89b3-5c3ba221257d-kube-api-access-sjfp2\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.239730 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.239770 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-log-ovn\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.239845 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-additional-scripts\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.239913 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-scripts\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.239942 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run-ovn\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.240055 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run-ovn\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.240210 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.240260 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-log-ovn\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.240742 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-additional-scripts\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.243463 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-scripts\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.246822 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/prometheus-metric-storage-0" podUID="c44e1a93-e233-46a2-b18a-e6c8c396a394" containerName="prometheus" probeResult="failure" output="Get \"http://10.217.0.118:9090/-/ready\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.260223 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjfp2\" (UniqueName: \"kubernetes.io/projected/f951edf4-2840-49d4-89b3-5c3ba221257d-kube-api-access-sjfp2\") pod \"ovn-controller-8m7cw-config-xrzkr\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.341345 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bd95044-6b92-43ac-8bab-05b16590530c-operator-scripts\") pod \"5bd95044-6b92-43ac-8bab-05b16590530c\" (UID: \"5bd95044-6b92-43ac-8bab-05b16590530c\") " Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.341466 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hqf6r\" (UniqueName: \"kubernetes.io/projected/5bd95044-6b92-43ac-8bab-05b16590530c-kube-api-access-hqf6r\") pod \"5bd95044-6b92-43ac-8bab-05b16590530c\" (UID: \"5bd95044-6b92-43ac-8bab-05b16590530c\") " Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.342278 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bd95044-6b92-43ac-8bab-05b16590530c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5bd95044-6b92-43ac-8bab-05b16590530c" (UID: "5bd95044-6b92-43ac-8bab-05b16590530c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.344944 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bd95044-6b92-43ac-8bab-05b16590530c-kube-api-access-hqf6r" (OuterVolumeSpecName: "kube-api-access-hqf6r") pod "5bd95044-6b92-43ac-8bab-05b16590530c" (UID: "5bd95044-6b92-43ac-8bab-05b16590530c"). InnerVolumeSpecName "kube-api-access-hqf6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.407748 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.443769 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5bd95044-6b92-43ac-8bab-05b16590530c-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.443806 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hqf6r\" (UniqueName: \"kubernetes.io/projected/5bd95044-6b92-43ac-8bab-05b16590530c-kube-api-access-hqf6r\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.736016 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"3d84ebaa92dda4938444dbe66351dc70145ad06833949a0a74f258ea3d54cb14"} Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.738570 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-313f-account-create-update-kbxsm" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.738595 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-313f-account-create-update-kbxsm" event={"ID":"5bd95044-6b92-43ac-8bab-05b16590530c","Type":"ContainerDied","Data":"0445e4ecee9c588235b681f4f4df101daeb5b9a8d2eba3560a66f5ae232aa930"} Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.738617 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0445e4ecee9c588235b681f4f4df101daeb5b9a8d2eba3560a66f5ae232aa930" Dec 05 17:33:25 crc kubenswrapper[4753]: I1205 17:33:25.914750 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-8m7cw-config-xrzkr"] Dec 05 17:33:26 crc kubenswrapper[4753]: W1205 17:33:26.063562 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf951edf4_2840_49d4_89b3_5c3ba221257d.slice/crio-67ad041d9d07e6f527185c8138fa52ef62aa43d08d989098a41c715591bfb0c1 WatchSource:0}: Error finding container 67ad041d9d07e6f527185c8138fa52ef62aa43d08d989098a41c715591bfb0c1: Status 404 returned error can't find the container with id 67ad041d9d07e6f527185c8138fa52ef62aa43d08d989098a41c715591bfb0c1 Dec 05 17:33:26 crc kubenswrapper[4753]: I1205 17:33:26.753019 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8m7cw-config-xrzkr" event={"ID":"f951edf4-2840-49d4-89b3-5c3ba221257d","Type":"ContainerStarted","Data":"446a6d48feef1dc049a50a1571760f8e2277f5954d496d300c7e59eeaf6e4a96"} Dec 05 17:33:26 crc kubenswrapper[4753]: I1205 17:33:26.753442 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8m7cw-config-xrzkr" 
event={"ID":"f951edf4-2840-49d4-89b3-5c3ba221257d","Type":"ContainerStarted","Data":"67ad041d9d07e6f527185c8138fa52ef62aa43d08d989098a41c715591bfb0c1"} Dec 05 17:33:26 crc kubenswrapper[4753]: I1205 17:33:26.754857 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"86ebce8f1a7a94096bf44e97fada8293d26326228ed6f601057021dfee74b9c1"} Dec 05 17:33:26 crc kubenswrapper[4753]: I1205 17:33:26.781659 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-8m7cw-config-xrzkr" podStartSLOduration=1.7816356249999998 podStartE2EDuration="1.781635625s" podCreationTimestamp="2025-12-05 17:33:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:26.771580471 +0000 UTC m=+1745.274687477" watchObservedRunningTime="2025-12-05 17:33:26.781635625 +0000 UTC m=+1745.284742631" Dec 05 17:33:27 crc kubenswrapper[4753]: I1205 17:33:27.775613 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189","Type":"ContainerStarted","Data":"47f626ab782058e176ed0f60c7b13106547864172a42a27e0a3e7b854a697a2d"} Dec 05 17:33:27 crc kubenswrapper[4753]: I1205 17:33:27.779610 4753 generic.go:334] "Generic (PLEG): container finished" podID="f951edf4-2840-49d4-89b3-5c3ba221257d" containerID="446a6d48feef1dc049a50a1571760f8e2277f5954d496d300c7e59eeaf6e4a96" exitCode=0 Dec 05 17:33:27 crc kubenswrapper[4753]: I1205 17:33:27.779671 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8m7cw-config-xrzkr" event={"ID":"f951edf4-2840-49d4-89b3-5c3ba221257d","Type":"ContainerDied","Data":"446a6d48feef1dc049a50a1571760f8e2277f5954d496d300c7e59eeaf6e4a96"} Dec 05 17:33:27 crc kubenswrapper[4753]: I1205 17:33:27.786486 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"36f194a5bd460ed52d6400b41ffa219ebe0e752b30d1d83bfa11fb1fb8748e1e"} Dec 05 17:33:27 crc kubenswrapper[4753]: I1205 17:33:27.786538 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"233fad0cc63369f09bce15df02f3250ddea7f1c75024983306394fee45c4f4ae"} Dec 05 17:33:27 crc kubenswrapper[4753]: I1205 17:33:27.786551 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"b48d59731cb13da95de3258d14f7b6c7845d544e6a60fed5642657b926f319f5"} Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.478806 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.649896 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run-ovn\") pod \"f951edf4-2840-49d4-89b3-5c3ba221257d\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.649956 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-scripts\") pod \"f951edf4-2840-49d4-89b3-5c3ba221257d\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.650000 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-additional-scripts\") pod \"f951edf4-2840-49d4-89b3-5c3ba221257d\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.649998 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "f951edf4-2840-49d4-89b3-5c3ba221257d" (UID: "f951edf4-2840-49d4-89b3-5c3ba221257d"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.650026 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run\") pod \"f951edf4-2840-49d4-89b3-5c3ba221257d\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.650061 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-log-ovn\") pod \"f951edf4-2840-49d4-89b3-5c3ba221257d\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.650140 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run" (OuterVolumeSpecName: "var-run") pod "f951edf4-2840-49d4-89b3-5c3ba221257d" (UID: "f951edf4-2840-49d4-89b3-5c3ba221257d"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.650248 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "f951edf4-2840-49d4-89b3-5c3ba221257d" (UID: "f951edf4-2840-49d4-89b3-5c3ba221257d"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.650276 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjfp2\" (UniqueName: \"kubernetes.io/projected/f951edf4-2840-49d4-89b3-5c3ba221257d-kube-api-access-sjfp2\") pod \"f951edf4-2840-49d4-89b3-5c3ba221257d\" (UID: \"f951edf4-2840-49d4-89b3-5c3ba221257d\") " Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.650669 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "f951edf4-2840-49d4-89b3-5c3ba221257d" (UID: "f951edf4-2840-49d4-89b3-5c3ba221257d"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.651059 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-scripts" (OuterVolumeSpecName: "scripts") pod "f951edf4-2840-49d4-89b3-5c3ba221257d" (UID: "f951edf4-2840-49d4-89b3-5c3ba221257d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.651279 4753 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.651300 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.651310 4753 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/f951edf4-2840-49d4-89b3-5c3ba221257d-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.651321 4753 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.651330 4753 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f951edf4-2840-49d4-89b3-5c3ba221257d-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.672436 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f951edf4-2840-49d4-89b3-5c3ba221257d-kube-api-access-sjfp2" (OuterVolumeSpecName: "kube-api-access-sjfp2") pod "f951edf4-2840-49d4-89b3-5c3ba221257d" (UID: "f951edf4-2840-49d4-89b3-5c3ba221257d"). InnerVolumeSpecName "kube-api-access-sjfp2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.720528 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:33:29 crc kubenswrapper[4753]: E1205 17:33:29.720802 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.753119 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjfp2\" (UniqueName: \"kubernetes.io/projected/f951edf4-2840-49d4-89b3-5c3ba221257d-kube-api-access-sjfp2\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.771416 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-8m7cw" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.815895 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"13ee83d6028b167ec14ff1a1b472861cf1611abe8cea27a734951b8edecce209"} Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.815938 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"e657dd2475264a29fc4b75a06fc3228b8b19b87e0f708976ea1adb5d2d960b3b"} Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.817961 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-8m7cw-config-xrzkr" event={"ID":"f951edf4-2840-49d4-89b3-5c3ba221257d","Type":"ContainerDied","Data":"67ad041d9d07e6f527185c8138fa52ef62aa43d08d989098a41c715591bfb0c1"} Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.818007 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67ad041d9d07e6f527185c8138fa52ef62aa43d08d989098a41c715591bfb0c1" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.818062 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-8m7cw-config-xrzkr" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.884202 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-8m7cw-config-xrzkr"] Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.895733 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-8m7cw-config-xrzkr"] Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.940679 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-jzgbr"] Dec 05 17:33:29 crc kubenswrapper[4753]: E1205 17:33:29.941987 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f951edf4-2840-49d4-89b3-5c3ba221257d" containerName="ovn-config" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.942026 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f951edf4-2840-49d4-89b3-5c3ba221257d" containerName="ovn-config" Dec 05 17:33:29 crc kubenswrapper[4753]: E1205 17:33:29.942084 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bd95044-6b92-43ac-8bab-05b16590530c" containerName="mariadb-account-create-update" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.942094 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bd95044-6b92-43ac-8bab-05b16590530c" containerName="mariadb-account-create-update" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.942972 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f951edf4-2840-49d4-89b3-5c3ba221257d" containerName="ovn-config" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.942987 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bd95044-6b92-43ac-8bab-05b16590530c" containerName="mariadb-account-create-update" Dec 05 17:33:29 crc kubenswrapper[4753]: I1205 17:33:29.943992 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.014307 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.014519 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-x2mdw" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.037002 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jzgbr"] Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.055497 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-4rjz4"] Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.056828 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.060102 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-config-data\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.060213 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-combined-ca-bundle\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.060243 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w8vw\" (UniqueName: \"kubernetes.io/projected/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-kube-api-access-4w8vw\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.060292 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-db-sync-config-data\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.062388 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4rjz4"] Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.063659 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.063806 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.063880 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jfr42" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.065733 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.162612 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bngjz\" (UniqueName: \"kubernetes.io/projected/ac022266-ee65-4109-9d59-b3e122702ccd-kube-api-access-bngjz\") pod \"keystone-db-sync-4rjz4\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") " pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.162681 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w8vw\" (UniqueName: \"kubernetes.io/projected/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-kube-api-access-4w8vw\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.162782 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-db-sync-config-data\") pod \"glance-db-sync-jzgbr\" 
(UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.162826 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-config-data\") pod \"keystone-db-sync-4rjz4\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") " pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.162889 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-config-data\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.162943 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-combined-ca-bundle\") pod \"keystone-db-sync-4rjz4\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") " pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.162997 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-combined-ca-bundle\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.168789 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-db-sync-config-data\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.169587 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-config-data\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.180606 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-combined-ca-bundle\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.197467 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w8vw\" (UniqueName: \"kubernetes.io/projected/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-kube-api-access-4w8vw\") pod \"glance-db-sync-jzgbr\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.264260 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-combined-ca-bundle\") pod \"keystone-db-sync-4rjz4\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") " pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.264348 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bngjz\" (UniqueName: \"kubernetes.io/projected/ac022266-ee65-4109-9d59-b3e122702ccd-kube-api-access-bngjz\") pod \"keystone-db-sync-4rjz4\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") " pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.264440 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-config-data\") pod \"keystone-db-sync-4rjz4\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") " pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.270859 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-config-data\") pod \"keystone-db-sync-4rjz4\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") " pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.275760 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-combined-ca-bundle\") pod \"keystone-db-sync-4rjz4\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") " pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.283022 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bngjz\" (UniqueName: \"kubernetes.io/projected/ac022266-ee65-4109-9d59-b3e122702ccd-kube-api-access-bngjz\") pod \"keystone-db-sync-4rjz4\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") " pod="openstack/keystone-db-sync-4rjz4" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.334073 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jzgbr" Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.380345 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-4rjz4"
Dec 05 17:33:30 crc kubenswrapper[4753]: E1205 17:33:30.732821 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice/crio-2c37b039ede04a74c28a7b3b237b8ffc9042ad09c0aa4c14d4a4581c2a6c11af\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc44e1a93_e233_46a2_b18a_e6c8c396a394.slice/crio-conmon-7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.847030 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"3eacb5e1c01e68f9b1913fb87109633a4474bd9aaa2acc4b06cd531c62453d66"}
Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.847078 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"3f6498deeca8693d69b3adbcb3bb03f1fc18cdc78455af05467ce66926df1db3"}
Dec 05 17:33:30 crc kubenswrapper[4753]: W1205 17:33:30.908543 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98474d5b_9d55_4704_a02c_1fd4ccf3f2f9.slice/crio-1c1ec996a85df84ba6642d091720a36d28b35bef1ee9cc80161eee9099701482 WatchSource:0}: Error finding container 1c1ec996a85df84ba6642d091720a36d28b35bef1ee9cc80161eee9099701482: Status 404 returned error can't find the container with id 1c1ec996a85df84ba6642d091720a36d28b35bef1ee9cc80161eee9099701482
Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.908923 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-jzgbr"]
Dec 05 17:33:30 crc kubenswrapper[4753]: I1205 17:33:30.928575 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-4rjz4"]
Dec 05 17:33:30 crc kubenswrapper[4753]: W1205 17:33:30.929241 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac022266_ee65_4109_9d59_b3e122702ccd.slice/crio-59f7b6c93614cd5bc3e02f6734b43ddfacf7bff2c7b6c6aca6d9bf1ceaa1f9bf WatchSource:0}: Error finding container 59f7b6c93614cd5bc3e02f6734b43ddfacf7bff2c7b6c6aca6d9bf1ceaa1f9bf: Status 404 returned error can't find the container with id 59f7b6c93614cd5bc3e02f6734b43ddfacf7bff2c7b6c6aca6d9bf1ceaa1f9bf
Dec 05 17:33:31 crc kubenswrapper[4753]: I1205 17:33:31.742326 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f951edf4-2840-49d4-89b3-5c3ba221257d" path="/var/lib/kubelet/pods/f951edf4-2840-49d4-89b3-5c3ba221257d/volumes"
Dec 05 17:33:31 crc kubenswrapper[4753]: I1205 17:33:31.869624 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"63e37f3fa4ecf25ca5c9cee73347bcaa4d55dea39de36d657b3855a003fc25cc"}
Dec 05 17:33:31 crc kubenswrapper[4753]: I1205 17:33:31.869682 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"c1096fa65a30a01d01f740feb82dd844f5c4cd358b9e72af8ea86b1e3381ef32"}
Dec 05 17:33:31 crc kubenswrapper[4753]: I1205 17:33:31.871621 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4rjz4" event={"ID":"ac022266-ee65-4109-9d59-b3e122702ccd","Type":"ContainerStarted","Data":"59f7b6c93614cd5bc3e02f6734b43ddfacf7bff2c7b6c6aca6d9bf1ceaa1f9bf"}
Dec 05 17:33:31 crc kubenswrapper[4753]: I1205 17:33:31.873236 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jzgbr" event={"ID":"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9","Type":"ContainerStarted","Data":"1c1ec996a85df84ba6642d091720a36d28b35bef1ee9cc80161eee9099701482"}
Dec 05 17:33:32 crc kubenswrapper[4753]: I1205 17:33:32.890632 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"1542477545bb41c5325a2d7cd474673e989da4fe9f335237f63010ab8e43cc7a"}
Dec 05 17:33:32 crc kubenswrapper[4753]: I1205 17:33:32.891268 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"ba4cc60f53bdd6060a7819db2e29cb0404071918c6fcd4fad5d15a8d4aee0b01"}
Dec 05 17:33:32 crc kubenswrapper[4753]: I1205 17:33:32.891284 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"254d420c11cc060c0bfa9f29cf2b4ea2c5aeeac7cf107422429ff49ebad68706"}
Dec 05 17:33:33 crc kubenswrapper[4753]: I1205 17:33:33.908961 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"dded3939a1a9c84d0a30081973550e5d2b98f77f7cc1db58abe018d5f12a9637"}
Dec 05 17:33:33 crc kubenswrapper[4753]: I1205 17:33:33.909007 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"f4707e97-4f70-42d5-959e-1d2c8a9629e5","Type":"ContainerStarted","Data":"fa7671661c9716056032feee974e647c2bae6c573a1fb8f849e22268ad09f88b"}
Dec 05 17:33:33 crc kubenswrapper[4753]: I1205 17:33:33.911649 4753 generic.go:334] "Generic (PLEG): container finished" podID="b4a8b3a8-c966-41e1-bb1e-a054e3c3e189" containerID="47f626ab782058e176ed0f60c7b13106547864172a42a27e0a3e7b854a697a2d" exitCode=0
Dec 05 17:33:33 crc kubenswrapper[4753]: I1205 17:33:33.911732 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189","Type":"ContainerDied","Data":"47f626ab782058e176ed0f60c7b13106547864172a42a27e0a3e7b854a697a2d"}
Dec 05 17:33:33 crc kubenswrapper[4753]: I1205 17:33:33.952461 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=36.500537382 podStartE2EDuration="42.952439083s" podCreationTimestamp="2025-12-05 17:32:51 +0000 UTC" firstStartedPulling="2025-12-05 17:33:24.950040764 +0000 UTC m=+1743.453147770" lastFinishedPulling="2025-12-05 17:33:31.401942475 +0000 UTC m=+1749.905049471" observedRunningTime="2025-12-05 17:33:33.942926564 +0000 UTC m=+1752.446033570" watchObservedRunningTime="2025-12-05 17:33:33.952439083 +0000 UTC m=+1752.455546089"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.214097 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"]
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.216108 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.221218 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.240040 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"]
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.341532 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/cloudkitty-lokistack-ingester-0" podUID="227cc7e4-602f-4c1e-afa7-0e106d3f505f" containerName="loki-ingester" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.353364 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.353644 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csrqm\" (UniqueName: \"kubernetes.io/projected/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-kube-api-access-csrqm\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.353745 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.353787 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.353875 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-config\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.354381 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.455680 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.455758 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.455814 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csrqm\" (UniqueName: \"kubernetes.io/projected/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-kube-api-access-csrqm\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.455833 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.455852 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.455880 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-config\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.456659 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-sb\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.456761 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-swift-storage-0\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.456800 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-config\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.456817 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\"
(UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-svc\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.457706 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-nb\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.479117 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csrqm\" (UniqueName: \"kubernetes.io/projected/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-kube-api-access-csrqm\") pod \"dnsmasq-dns-6d5b6d6b67-4tlsv\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:34 crc kubenswrapper[4753]: I1205 17:33:34.541610 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:37 crc kubenswrapper[4753]: I1205 17:33:37.129819 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"]
Dec 05 17:33:37 crc kubenswrapper[4753]: W1205 17:33:37.141122 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a7a89d0_87ed_4add_a8ba_0e0bd9fd22a0.slice/crio-afcf3389ccd48faf77422f803036bb34916e06d86bcec8e0ee18cafca2148c1c WatchSource:0}: Error finding container afcf3389ccd48faf77422f803036bb34916e06d86bcec8e0ee18cafca2148c1c: Status 404 returned error can't find the container with id afcf3389ccd48faf77422f803036bb34916e06d86bcec8e0ee18cafca2148c1c
Dec 05 17:33:37 crc kubenswrapper[4753]: I1205 17:33:37.958969 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4rjz4" event={"ID":"ac022266-ee65-4109-9d59-b3e122702ccd","Type":"ContainerStarted","Data":"6a2916f7375c1b6afc1565f9eabc965d1f3ee4ba592c3cad7af614fc036fd121"}
Dec 05 17:33:37 crc kubenswrapper[4753]: I1205 17:33:37.962640 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189","Type":"ContainerStarted","Data":"e1b814441eb6296eb650a1606a90b01b12e20fa3d6e44e04a6234680f3c391b4"}
Dec 05 17:33:37 crc kubenswrapper[4753]: I1205 17:33:37.964090 4753 generic.go:334] "Generic (PLEG): container finished" podID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" containerID="25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69" exitCode=0
Dec 05 17:33:37 crc kubenswrapper[4753]: I1205 17:33:37.964120 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv" event={"ID":"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0","Type":"ContainerDied","Data":"25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69"}
Dec 05 17:33:37 crc kubenswrapper[4753]: I1205 17:33:37.964136 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv" event={"ID":"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0","Type":"ContainerStarted","Data":"afcf3389ccd48faf77422f803036bb34916e06d86bcec8e0ee18cafca2148c1c"}
Dec 05 17:33:37 crc kubenswrapper[4753]: I1205 17:33:37.981843 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-4rjz4" podStartSLOduration=3.238881126 podStartE2EDuration="8.981824372s" podCreationTimestamp="2025-12-05 17:33:29 +0000 UTC" firstStartedPulling="2025-12-05 17:33:30.931480231 +0000 UTC m=+1749.434587237" lastFinishedPulling="2025-12-05 17:33:36.674423477 +0000 UTC m=+1755.177530483" observedRunningTime="2025-12-05 17:33:37.974239677 +0000 UTC m=+1756.477346683" watchObservedRunningTime="2025-12-05 17:33:37.981824372 +0000 UTC m=+1756.484931378"
Dec 05 17:33:40 crc kubenswrapper[4753]: I1205 17:33:40.001087 4753 generic.go:334] "Generic (PLEG): container finished" podID="ac022266-ee65-4109-9d59-b3e122702ccd" containerID="6a2916f7375c1b6afc1565f9eabc965d1f3ee4ba592c3cad7af614fc036fd121" exitCode=0
Dec 05 17:33:40 crc kubenswrapper[4753]: I1205 17:33:40.001198 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4rjz4" event={"ID":"ac022266-ee65-4109-9d59-b3e122702ccd","Type":"ContainerDied","Data":"6a2916f7375c1b6afc1565f9eabc965d1f3ee4ba592c3cad7af614fc036fd121"}
Dec 05 17:33:40 crc kubenswrapper[4753]: E1205 17:33:40.978887 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice/crio-2c37b039ede04a74c28a7b3b237b8ffc9042ad09c0aa4c14d4a4581c2a6c11af\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice\": RecentStats: unable to find data in memory cache], [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc44e1a93_e233_46a2_b18a_e6c8c396a394.slice/crio-conmon-7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb.scope\": RecentStats: unable to find data in memory cache]"
Dec 05 17:33:41 crc kubenswrapper[4753]: I1205 17:33:41.011577 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189","Type":"ContainerStarted","Data":"3a284fc49840fbcc4614c63ec9cb3b82b359606c5cb7149d603d28f3cc107d20"}
Dec 05 17:33:42 crc kubenswrapper[4753]: I1205 17:33:42.720457 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd"
Dec 05 17:33:42 crc kubenswrapper[4753]: E1205 17:33:42.721057 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:33:44 crc kubenswrapper[4753]: I1205 17:33:44.343443 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-lokistack-ingester-0"
Dec 05 17:33:45 crc kubenswrapper[4753]: I1205 17:33:45.925254 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4rjz4"
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.011439 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-config-data\") pod \"ac022266-ee65-4109-9d59-b3e122702ccd\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") "
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.011613 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bngjz\" (UniqueName: \"kubernetes.io/projected/ac022266-ee65-4109-9d59-b3e122702ccd-kube-api-access-bngjz\") pod \"ac022266-ee65-4109-9d59-b3e122702ccd\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") "
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.011697 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-combined-ca-bundle\") pod \"ac022266-ee65-4109-9d59-b3e122702ccd\" (UID: \"ac022266-ee65-4109-9d59-b3e122702ccd\") "
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.033409 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac022266-ee65-4109-9d59-b3e122702ccd-kube-api-access-bngjz" (OuterVolumeSpecName: "kube-api-access-bngjz") pod "ac022266-ee65-4109-9d59-b3e122702ccd" (UID: "ac022266-ee65-4109-9d59-b3e122702ccd"). InnerVolumeSpecName "kube-api-access-bngjz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.058199 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ac022266-ee65-4109-9d59-b3e122702ccd" (UID: "ac022266-ee65-4109-9d59-b3e122702ccd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.064234 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-4rjz4" event={"ID":"ac022266-ee65-4109-9d59-b3e122702ccd","Type":"ContainerDied","Data":"59f7b6c93614cd5bc3e02f6734b43ddfacf7bff2c7b6c6aca6d9bf1ceaa1f9bf"}
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.064278 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="59f7b6c93614cd5bc3e02f6734b43ddfacf7bff2c7b6c6aca6d9bf1ceaa1f9bf"
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.064312 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-4rjz4"
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.115184 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bngjz\" (UniqueName: \"kubernetes.io/projected/ac022266-ee65-4109-9d59-b3e122702ccd-kube-api-access-bngjz\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.115221 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.131764 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-config-data" (OuterVolumeSpecName: "config-data") pod "ac022266-ee65-4109-9d59-b3e122702ccd" (UID: "ac022266-ee65-4109-9d59-b3e122702ccd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:46 crc kubenswrapper[4753]: I1205 17:33:46.217233 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ac022266-ee65-4109-9d59-b3e122702ccd-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.079009 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/prometheus-metric-storage-0" event={"ID":"b4a8b3a8-c966-41e1-bb1e-a054e3c3e189","Type":"ContainerStarted","Data":"e360ac4d767551e95781f8ceebd712db1571572dc22ebcf76ae8c1d623c205ad"}
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.081204 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv" event={"ID":"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0","Type":"ContainerStarted","Data":"177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49"}
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.081478 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.085584 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jzgbr" event={"ID":"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9","Type":"ContainerStarted","Data":"ebd79e80aaf3fe4c3fc1df9b6c3f177dca356f7f9bf9f677ad32e21e54bff97b"}
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.130297 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/prometheus-metric-storage-0" podStartSLOduration=25.130267047 podStartE2EDuration="25.130267047s" podCreationTimestamp="2025-12-05 17:33:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:47.111265779 +0000 UTC m=+1765.614372835" watchObservedRunningTime="2025-12-05 17:33:47.130267047 +0000 UTC m=+1765.633374093"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.157503 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv" podStartSLOduration=13.157484457 podStartE2EDuration="13.157484457s" podCreationTimestamp="2025-12-05 17:33:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:47.150653124 +0000 UTC m=+1765.653760140" watchObservedRunningTime="2025-12-05 17:33:47.157484457 +0000 UTC
m=+1765.660591463"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.169445 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-jzgbr" podStartSLOduration=3.15569928 podStartE2EDuration="18.169425116s" podCreationTimestamp="2025-12-05 17:33:29 +0000 UTC" firstStartedPulling="2025-12-05 17:33:30.911131475 +0000 UTC m=+1749.414238481" lastFinishedPulling="2025-12-05 17:33:45.924857311 +0000 UTC m=+1764.427964317" observedRunningTime="2025-12-05 17:33:47.167415149 +0000 UTC m=+1765.670522155" watchObservedRunningTime="2025-12-05 17:33:47.169425116 +0000 UTC m=+1765.672532122"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.252353 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-kqr6s"]
Dec 05 17:33:47 crc kubenswrapper[4753]: E1205 17:33:47.255225 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac022266-ee65-4109-9d59-b3e122702ccd" containerName="keystone-db-sync"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.255244 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac022266-ee65-4109-9d59-b3e122702ccd" containerName="keystone-db-sync"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.255445 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac022266-ee65-4109-9d59-b3e122702ccd" containerName="keystone-db-sync"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.256112 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.262675 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.263631 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.265412 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.265507 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jfr42"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.270835 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.271000 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"]
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.281974 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kqr6s"]
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.332460 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-78zf9"]
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.334666 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.338650 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-credential-keys\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.338701 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-combined-ca-bundle\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.338729 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r5wc\" (UniqueName: \"kubernetes.io/projected/38fb36bd-4737-4f46-a15a-ae6472a03567-kube-api-access-4r5wc\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.338796 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-fernet-keys\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.338845 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-config-data\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.338861 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-scripts\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.365693 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-78zf9"]
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.439981 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-swift-storage-0\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440060 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-config\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440083 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-fernet-keys\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440110 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440168 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-svc\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440198 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-config-data\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440215 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-scripts\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440265 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440280 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-credential-keys\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440307 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-combined-ca-bundle\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440327 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r5wc\" (UniqueName: \"kubernetes.io/projected/38fb36bd-4737-4f46-a15a-ae6472a03567-kube-api-access-4r5wc\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.440348 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kphb4\" (UniqueName: \"kubernetes.io/projected/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-kube-api-access-kphb4\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.448556 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-combined-ca-bundle\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.448769 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-scripts\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.451779 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-config-data\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.453595 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-fernet-keys\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.461642 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-credential-keys\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.497981 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r5wc\" (UniqueName: \"kubernetes.io/projected/38fb36bd-4737-4f46-a15a-ae6472a03567-kube-api-access-4r5wc\") pod \"keystone-bootstrap-kqr6s\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.519979 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-f6c6z"]
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.521643 4753 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.537098 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.541975 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.542847 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qtvrt"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.542988 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544232 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-config-data\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544291 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t57k6\" (UniqueName: \"kubernetes.io/projected/68a4a494-d4ff-43ee-a74c-4f0377d229ec-kube-api-access-t57k6\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544330 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kphb4\" (UniqueName: \"kubernetes.io/projected/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-kube-api-access-kphb4\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544360 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-scripts\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544437 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-swift-storage-0\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544482 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-combined-ca-bundle\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544514 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-config\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544547 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544613 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-svc\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544675 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-db-sync-config-data\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544699 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/68a4a494-d4ff-43ee-a74c-4f0377d229ec-etc-machine-id\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.544752 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.545742 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-nb\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.546787 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-swift-storage-0\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.547042 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-sb\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.547534 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-svc\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.547627 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-config\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.551762 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.553167 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-f6c6z"]
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.566846 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.567078 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.577051 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kqr6s"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.584032 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kphb4\" (UniqueName: \"kubernetes.io/projected/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-kube-api-access-kphb4\") pod \"dnsmasq-dns-6f8c45789f-78zf9\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " pod="openstack/dnsmasq-dns-6f8c45789f-78zf9"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.624077 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.646635 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-combined-ca-bundle\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.646708 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-scripts\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.646740 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-log-httpd\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.646780 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7klpl\" (UniqueName: \"kubernetes.io/projected/cffca222-0336-40c8-886f-247667322702-kube-api-access-7klpl\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.647044 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.647084 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-db-sync-config-data\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.647110 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/68a4a494-d4ff-43ee-a74c-4f0377d229ec-etc-machine-id\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.647167 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-config-data\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.647209 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.647233 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-config-data\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.647270 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t57k6\" (UniqueName: \"kubernetes.io/projected/68a4a494-d4ff-43ee-a74c-4f0377d229ec-kube-api-access-t57k6\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.647311 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-run-httpd\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.647336 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-scripts\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.657215 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-9vsjw"]
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.657544 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-scripts\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z"
Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.658225 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName:
\"kubernetes.io/host-path/68a4a494-d4ff-43ee-a74c-4f0377d229ec-etc-machine-id\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.658540 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.664465 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.664599 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-45bq2" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.669853 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-rwqhq"] Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.670481 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-db-sync-config-data\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.673292 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.674430 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.674640 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.676381 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-combined-ca-bundle\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.676640 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.676659 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-78zf9" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.676889 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-px6hm" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.676972 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.688186 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-config-data\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.703889 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t57k6\" (UniqueName: \"kubernetes.io/projected/68a4a494-d4ff-43ee-a74c-4f0377d229ec-kube-api-access-t57k6\") pod \"cinder-db-sync-f6c6z\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " pod="openstack/cinder-db-sync-f6c6z" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.712496 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-9vsjw"] Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.721283 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-f6c6z" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.748209 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-rwqhq"] Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.752778 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-run-httpd\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.754977 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-run-httpd\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.759363 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-scripts\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.759430 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-combined-ca-bundle\") pod \"neutron-db-sync-rwqhq\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.759453 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-log-httpd\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.759489 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-7klpl\" (UniqueName: \"kubernetes.io/projected/cffca222-0336-40c8-886f-247667322702-kube-api-access-7klpl\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.759613 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.759629 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-config\") pod \"neutron-db-sync-rwqhq\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.759714 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-config-data\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.759776 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.759818 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lflvw\" (UniqueName: \"kubernetes.io/projected/bbdd764d-13d3-4597-a938-04eeb490a8ba-kube-api-access-lflvw\") pod \"neutron-db-sync-rwqhq\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.762393 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-log-httpd\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.766935 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-config-data\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.767998 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-scripts\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.768116 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-78zf9"] Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.776951 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.835219 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.870052 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-combined-ca-bundle\") pod \"neutron-db-sync-rwqhq\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.872358 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-combined-ca-bundle\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.872585 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-config\") pod \"neutron-db-sync-rwqhq\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.872632 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-certs\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.872700 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-config-data\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.872819 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2db6\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-kube-api-access-t2db6\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.872936 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lflvw\" (UniqueName: \"kubernetes.io/projected/bbdd764d-13d3-4597-a938-04eeb490a8ba-kube-api-access-lflvw\") pod \"neutron-db-sync-rwqhq\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.873016 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-scripts\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc 
kubenswrapper[4753]: I1205 17:33:47.871098 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7klpl\" (UniqueName: \"kubernetes.io/projected/cffca222-0336-40c8-886f-247667322702-kube-api-access-7klpl\") pod \"ceilometer-0\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " pod="openstack/ceilometer-0" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.895819 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-l429g"] Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.904247 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.905522 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lflvw\" (UniqueName: \"kubernetes.io/projected/bbdd764d-13d3-4597-a938-04eeb490a8ba-kube-api-access-lflvw\") pod \"neutron-db-sync-rwqhq\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.907191 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-v5mm4" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.910573 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.923833 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-config\") pod \"neutron-db-sync-rwqhq\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.928440 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-combined-ca-bundle\") pod \"neutron-db-sync-rwqhq\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.953290 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-l429g"] Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.980786 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-d47nm"] Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.982050 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.983961 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-combined-ca-bundle\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.990213 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-certs\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.990400 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-config-data\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.990507 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2db6\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-kube-api-access-t2db6\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.990660 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-scripts\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.988769 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.988894 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fvnlc" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.988916 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.993729 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-combined-ca-bundle\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.997073 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-config-data\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:47 crc kubenswrapper[4753]: I1205 17:33:47.997656 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-certs\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 
05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.006607 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-p8h8t"] Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.008308 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-scripts\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.009715 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.019503 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.025242 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-d47nm"] Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.046382 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-p8h8t"] Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.052216 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2db6\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-kube-api-access-t2db6\") pod \"cloudkitty-db-sync-9vsjw\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.065678 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.092631 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvmdq\" (UniqueName: \"kubernetes.io/projected/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-kube-api-access-kvmdq\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.092680 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-config-data\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.092704 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-combined-ca-bundle\") pod \"barbican-db-sync-l429g\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.092735 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-combined-ca-bundle\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.092811 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zc7jr\" 
(UniqueName: \"kubernetes.io/projected/562378ad-0bd6-4143-bc1e-331ec844e38f-kube-api-access-zc7jr\") pod \"barbican-db-sync-l429g\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.092854 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-db-sync-config-data\") pod \"barbican-db-sync-l429g\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.092878 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-scripts\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.092907 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-logs\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194513 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-logs\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194588 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvmdq\" (UniqueName: \"kubernetes.io/projected/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-kube-api-access-kvmdq\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194626 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-config-data\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194644 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-combined-ca-bundle\") pod \"barbican-db-sync-l429g\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194666 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-config\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194702 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-combined-ca-bundle\") pod 
\"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194735 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194777 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194821 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194840 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zc7jr\" (UniqueName: \"kubernetes.io/projected/562378ad-0bd6-4143-bc1e-331ec844e38f-kube-api-access-zc7jr\") pod \"barbican-db-sync-l429g\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194888 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pw7zv\" (UniqueName: \"kubernetes.io/projected/914bdf90-d9e9-4a6c-8c20-2512642669e1-kube-api-access-pw7zv\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194919 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-db-sync-config-data\") pod \"barbican-db-sync-l429g\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194940 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.194957 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-scripts\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.195958 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-logs\") pod 
\"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.201862 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-config-data\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.208866 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-combined-ca-bundle\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.209695 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-db-sync-config-data\") pod \"barbican-db-sync-l429g\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.209838 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/prometheus-metric-storage-0" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.215697 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-combined-ca-bundle\") pod \"barbican-db-sync-l429g\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.221842 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-scripts\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.236919 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zc7jr\" (UniqueName: \"kubernetes.io/projected/562378ad-0bd6-4143-bc1e-331ec844e38f-kube-api-access-zc7jr\") pod \"barbican-db-sync-l429g\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.272404 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvmdq\" (UniqueName: \"kubernetes.io/projected/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-kube-api-access-kvmdq\") pod \"placement-db-sync-d47nm\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.298623 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-config\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.299199 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-sb\") pod 
\"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.299392 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.299680 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.300659 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-sb\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.303218 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.306377 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-svc\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.305874 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-nb\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.303944 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-config\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.308604 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pw7zv\" (UniqueName: \"kubernetes.io/projected/914bdf90-d9e9-4a6c-8c20-2512642669e1-kube-api-access-pw7zv\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.308693 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.312795 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-swift-storage-0\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.338908 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pw7zv\" (UniqueName: \"kubernetes.io/projected/914bdf90-d9e9-4a6c-8c20-2512642669e1-kube-api-access-pw7zv\") pod \"dnsmasq-dns-fcfdd6f9f-p8h8t\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") " pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.346641 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-l429g" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.366592 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-d47nm" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.392637 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.509723 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-kqr6s"] Dec 05 17:33:48 crc kubenswrapper[4753]: W1205 17:33:48.559485 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod38fb36bd_4737_4f46_a15a_ae6472a03567.slice/crio-4ea6dd043e596340f8a2c08a7e4d531ea16bf9898bb2970cb33b5f0f6c9b59ee WatchSource:0}: Error finding container 4ea6dd043e596340f8a2c08a7e4d531ea16bf9898bb2970cb33b5f0f6c9b59ee: Status 404 returned error can't find the container with id 4ea6dd043e596340f8a2c08a7e4d531ea16bf9898bb2970cb33b5f0f6c9b59ee Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.807413 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-f6c6z"] Dec 05 17:33:48 crc kubenswrapper[4753]: I1205 17:33:48.912846 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-78zf9"] Dec 05 17:33:48 crc kubenswrapper[4753]: W1205 17:33:48.924113 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d9e3a47_4d44_490a_bfa0_34d0e0e68373.slice/crio-e0474c0510e0a78846fd0c922ae5018a2458fa77e6e4aae16e46626699e090c9 WatchSource:0}: Error finding container e0474c0510e0a78846fd0c922ae5018a2458fa77e6e4aae16e46626699e090c9: Status 404 returned error can't find the container with id e0474c0510e0a78846fd0c922ae5018a2458fa77e6e4aae16e46626699e090c9 Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.129777 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-f6c6z" event={"ID":"68a4a494-d4ff-43ee-a74c-4f0377d229ec","Type":"ContainerStarted","Data":"6cbd5092e3875836c748f4ff566a0ac135c05f3b80857ed05ccbb2de2bf200d7"} Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.131099 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kqr6s" event={"ID":"38fb36bd-4737-4f46-a15a-ae6472a03567","Type":"ContainerStarted","Data":"418ce809484f34822deed56f5718ae8c28ae220b7a1026936f73ce1f6350bb9c"} Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.131123 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kqr6s" 
event={"ID":"38fb36bd-4737-4f46-a15a-ae6472a03567","Type":"ContainerStarted","Data":"4ea6dd043e596340f8a2c08a7e4d531ea16bf9898bb2970cb33b5f0f6c9b59ee"} Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.134992 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8c45789f-78zf9" event={"ID":"5d9e3a47-4d44-490a-bfa0-34d0e0e68373","Type":"ContainerStarted","Data":"e0474c0510e0a78846fd0c922ae5018a2458fa77e6e4aae16e46626699e090c9"} Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.135202 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv" podUID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" containerName="dnsmasq-dns" containerID="cri-o://177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49" gracePeriod=10 Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.152526 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-kqr6s" podStartSLOduration=2.152509675 podStartE2EDuration="2.152509675s" podCreationTimestamp="2025-12-05 17:33:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:49.147587236 +0000 UTC m=+1767.650694262" watchObservedRunningTime="2025-12-05 17:33:49.152509675 +0000 UTC m=+1767.655616681" Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.191088 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.200631 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-rwqhq"] Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.370398 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-9vsjw"] Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.421913 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-l429g"] Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.433819 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-p8h8t"] Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.614107 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-d47nm"] Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.657346 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv" Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.766386 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-swift-storage-0\") pod \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.766483 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-svc\") pod \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.766536 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-config\") pod \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.766579 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csrqm\" (UniqueName: \"kubernetes.io/projected/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-kube-api-access-csrqm\") pod \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.766599 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-sb\") pod \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.766736 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-nb\") pod \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\" (UID: \"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0\") " Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.788911 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-kube-api-access-csrqm" (OuterVolumeSpecName: "kube-api-access-csrqm") pod "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" (UID: "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0"). InnerVolumeSpecName "kube-api-access-csrqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:49 crc kubenswrapper[4753]: I1205 17:33:49.870042 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csrqm\" (UniqueName: \"kubernetes.io/projected/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-kube-api-access-csrqm\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.171369 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" (UID: "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.192288 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.210833 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" (UID: "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.230329 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-config" (OuterVolumeSpecName: "config") pod "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" (UID: "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.249003 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" (UID: "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.250570 4753 generic.go:334] "Generic (PLEG): container finished" podID="5d9e3a47-4d44-490a-bfa0-34d0e0e68373" containerID="f755eddb069f9db8f5b0722995c57e6910b9b4cbb6da727deff4b549a8e34f06" exitCode=0 Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.252418 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" (UID: "2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.294317 4753 generic.go:334] "Generic (PLEG): container finished" podID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" containerID="177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49" exitCode=0 Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.294808 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.299825 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.299872 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.299881 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.299890 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.411087 4753 kubelet_pods.go:2476] "Failed to reduce cpu time for pod pending volume cleanup" podUID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" err="openat2 /sys/fs/cgroup/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2a7a89d0_87ed_4add_a8ba_0e0bd9fd22a0.slice/cgroup.controllers: no such file or directory" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.411185 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-9vsjw" event={"ID":"f5711686-6b64-450f-b2b4-6583dab08275","Type":"ContainerStarted","Data":"a40dad9343c3f0581c8eed8a3c26c14028f295eba2e08338ebe58184ec8e2568"} Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.411208 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.411232 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8c45789f-78zf9" event={"ID":"5d9e3a47-4d44-490a-bfa0-34d0e0e68373","Type":"ContainerDied","Data":"f755eddb069f9db8f5b0722995c57e6910b9b4cbb6da727deff4b549a8e34f06"} Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.411245 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv" event={"ID":"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0","Type":"ContainerDied","Data":"177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49"} Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.411258 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6d5b6d6b67-4tlsv" event={"ID":"2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0","Type":"ContainerDied","Data":"afcf3389ccd48faf77422f803036bb34916e06d86bcec8e0ee18cafca2148c1c"} Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.411267 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" event={"ID":"914bdf90-d9e9-4a6c-8c20-2512642669e1","Type":"ContainerStarted","Data":"e1ff7227b5d0fe19466c00a3c27da19f7e2afaadb71baf44f70b3855cea4c1fe"} Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.411277 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-d47nm" event={"ID":"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4","Type":"ContainerStarted","Data":"c3a40add4428256bb981cae972b2f8d931e760e5565dfcaa08a3b126d52afd48"} Dec 05 17:33:50 crc 
kubenswrapper[4753]: I1205 17:33:50.411294 4753 scope.go:117] "RemoveContainer" containerID="177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.426565 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-l429g" event={"ID":"562378ad-0bd6-4143-bc1e-331ec844e38f","Type":"ContainerStarted","Data":"2d7660b8d6b99d54905ec57f42a15d4279514535700237aff37e6d80ca2717c8"} Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.453408 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rwqhq" event={"ID":"bbdd764d-13d3-4597-a938-04eeb490a8ba","Type":"ContainerStarted","Data":"98ee6cc5d087d5642253f4516ee41bb7601dc040eef2c2f7656a06a05f43b060"} Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.453456 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rwqhq" event={"ID":"bbdd764d-13d3-4597-a938-04eeb490a8ba","Type":"ContainerStarted","Data":"78985ab4be678acc7e6927fb3eb5e300d6d976cdeaddfafe2f74d80d12a0b313"} Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.454729 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"] Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.480076 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cffca222-0336-40c8-886f-247667322702","Type":"ContainerStarted","Data":"0478e3b1ec2a549cc7093ced1233130fda2058faddd46c0ee81a8755e7923cfb"} Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.486587 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6d5b6d6b67-4tlsv"] Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.530173 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-rwqhq" podStartSLOduration=3.530097797 podStartE2EDuration="3.530097797s" podCreationTimestamp="2025-12-05 17:33:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:50.518707584 +0000 UTC m=+1769.021814590" watchObservedRunningTime="2025-12-05 17:33:50.530097797 +0000 UTC m=+1769.033204803" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.539606 4753 scope.go:117] "RemoveContainer" containerID="25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.773434 4753 scope.go:117] "RemoveContainer" containerID="177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49" Dec 05 17:33:50 crc kubenswrapper[4753]: E1205 17:33:50.785020 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49\": container with ID starting with 177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49 not found: ID does not exist" containerID="177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.785057 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49"} err="failed to get container status \"177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49\": rpc error: code = NotFound desc = could not find container 
\"177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49\": container with ID starting with 177ff1b53a1f100636c36122cb464c037e581831941a935a655c7dcbe4a6ee49 not found: ID does not exist" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.785084 4753 scope.go:117] "RemoveContainer" containerID="25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69" Dec 05 17:33:50 crc kubenswrapper[4753]: E1205 17:33:50.801242 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69\": container with ID starting with 25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69 not found: ID does not exist" containerID="25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69" Dec 05 17:33:50 crc kubenswrapper[4753]: I1205 17:33:50.801289 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69"} err="failed to get container status \"25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69\": rpc error: code = NotFound desc = could not find container \"25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69\": container with ID starting with 25a89baf20c42b6d30d3cc81f5cb1ecb5455d6c69f268b8773ede736a8424e69 not found: ID does not exist" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.003021 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-78zf9" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.128975 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-sb\") pod \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.129424 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-swift-storage-0\") pod \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.129933 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-config\") pod \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.130271 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-nb\") pod \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.130330 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-svc\") pod \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.130505 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kphb4\" 
(UniqueName: \"kubernetes.io/projected/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-kube-api-access-kphb4\") pod \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\" (UID: \"5d9e3a47-4d44-490a-bfa0-34d0e0e68373\") " Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.155797 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5d9e3a47-4d44-490a-bfa0-34d0e0e68373" (UID: "5d9e3a47-4d44-490a-bfa0-34d0e0e68373"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.167649 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-kube-api-access-kphb4" (OuterVolumeSpecName: "kube-api-access-kphb4") pod "5d9e3a47-4d44-490a-bfa0-34d0e0e68373" (UID: "5d9e3a47-4d44-490a-bfa0-34d0e0e68373"). InnerVolumeSpecName "kube-api-access-kphb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.175256 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5d9e3a47-4d44-490a-bfa0-34d0e0e68373" (UID: "5d9e3a47-4d44-490a-bfa0-34d0e0e68373"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.178386 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5d9e3a47-4d44-490a-bfa0-34d0e0e68373" (UID: "5d9e3a47-4d44-490a-bfa0-34d0e0e68373"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.185506 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5d9e3a47-4d44-490a-bfa0-34d0e0e68373" (UID: "5d9e3a47-4d44-490a-bfa0-34d0e0e68373"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.209816 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-config" (OuterVolumeSpecName: "config") pod "5d9e3a47-4d44-490a-bfa0-34d0e0e68373" (UID: "5d9e3a47-4d44-490a-bfa0-34d0e0e68373"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.239723 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kphb4\" (UniqueName: \"kubernetes.io/projected/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-kube-api-access-kphb4\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.239757 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.239770 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.239781 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.239792 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.239802 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5d9e3a47-4d44-490a-bfa0-34d0e0e68373-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:51 crc kubenswrapper[4753]: E1205 17:33:51.428544 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice/crio-2c37b039ede04a74c28a7b3b237b8ffc9042ad09c0aa4c14d4a4581c2a6c11af\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc44e1a93_e233_46a2_b18a_e6c8c396a394.slice/crio-conmon-7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb.scope\": RecentStats: unable to find data in memory cache], [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache]" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.499127 4753 generic.go:334] "Generic (PLEG): container finished" podID="914bdf90-d9e9-4a6c-8c20-2512642669e1" containerID="d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0" exitCode=0 Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.499454 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" event={"ID":"914bdf90-d9e9-4a6c-8c20-2512642669e1","Type":"ContainerDied","Data":"d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0"} Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.505976 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6f8c45789f-78zf9" event={"ID":"5d9e3a47-4d44-490a-bfa0-34d0e0e68373","Type":"ContainerDied","Data":"e0474c0510e0a78846fd0c922ae5018a2458fa77e6e4aae16e46626699e090c9"} Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.506026 4753 scope.go:117] "RemoveContainer" 
containerID="f755eddb069f9db8f5b0722995c57e6910b9b4cbb6da727deff4b549a8e34f06" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.506087 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6f8c45789f-78zf9" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.606346 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-78zf9"] Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.631054 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6f8c45789f-78zf9"] Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.754839 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" path="/var/lib/kubelet/pods/2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0/volumes" Dec 05 17:33:51 crc kubenswrapper[4753]: I1205 17:33:51.755772 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d9e3a47-4d44-490a-bfa0-34d0e0e68373" path="/var/lib/kubelet/pods/5d9e3a47-4d44-490a-bfa0-34d0e0e68373/volumes" Dec 05 17:33:53 crc kubenswrapper[4753]: I1205 17:33:53.210685 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/prometheus-metric-storage-0" Dec 05 17:33:53 crc kubenswrapper[4753]: I1205 17:33:53.218629 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/prometheus-metric-storage-0" Dec 05 17:33:53 crc kubenswrapper[4753]: I1205 17:33:53.570406 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/prometheus-metric-storage-0" Dec 05 17:33:53 crc kubenswrapper[4753]: I1205 17:33:53.721422 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:33:53 crc kubenswrapper[4753]: E1205 17:33:53.721669 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:33:55 crc kubenswrapper[4753]: I1205 17:33:55.588885 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" event={"ID":"914bdf90-d9e9-4a6c-8c20-2512642669e1","Type":"ContainerStarted","Data":"8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08"} Dec 05 17:33:55 crc kubenswrapper[4753]: I1205 17:33:55.589314 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:33:55 crc kubenswrapper[4753]: I1205 17:33:55.614217 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" podStartSLOduration=8.614136022 podStartE2EDuration="8.614136022s" podCreationTimestamp="2025-12-05 17:33:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:33:55.607933987 +0000 UTC m=+1774.111040993" watchObservedRunningTime="2025-12-05 17:33:55.614136022 +0000 UTC m=+1774.117243038" Dec 05 17:33:56 crc kubenswrapper[4753]: I1205 17:33:56.600817 4753 generic.go:334] "Generic (PLEG): container finished" podID="38fb36bd-4737-4f46-a15a-ae6472a03567" 
containerID="418ce809484f34822deed56f5718ae8c28ae220b7a1026936f73ce1f6350bb9c" exitCode=0 Dec 05 17:33:56 crc kubenswrapper[4753]: I1205 17:33:56.600907 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-kqr6s" event={"ID":"38fb36bd-4737-4f46-a15a-ae6472a03567","Type":"ContainerDied","Data":"418ce809484f34822deed56f5718ae8c28ae220b7a1026936f73ce1f6350bb9c"} Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.203083 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kqr6s" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.247588 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-credential-keys\") pod \"38fb36bd-4737-4f46-a15a-ae6472a03567\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.247656 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-config-data\") pod \"38fb36bd-4737-4f46-a15a-ae6472a03567\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.247720 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-fernet-keys\") pod \"38fb36bd-4737-4f46-a15a-ae6472a03567\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.247784 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-scripts\") pod \"38fb36bd-4737-4f46-a15a-ae6472a03567\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.247809 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r5wc\" (UniqueName: \"kubernetes.io/projected/38fb36bd-4737-4f46-a15a-ae6472a03567-kube-api-access-4r5wc\") pod \"38fb36bd-4737-4f46-a15a-ae6472a03567\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.247934 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-combined-ca-bundle\") pod \"38fb36bd-4737-4f46-a15a-ae6472a03567\" (UID: \"38fb36bd-4737-4f46-a15a-ae6472a03567\") " Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.256275 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "38fb36bd-4737-4f46-a15a-ae6472a03567" (UID: "38fb36bd-4737-4f46-a15a-ae6472a03567"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.256505 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "38fb36bd-4737-4f46-a15a-ae6472a03567" (UID: "38fb36bd-4737-4f46-a15a-ae6472a03567"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.256528 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38fb36bd-4737-4f46-a15a-ae6472a03567-kube-api-access-4r5wc" (OuterVolumeSpecName: "kube-api-access-4r5wc") pod "38fb36bd-4737-4f46-a15a-ae6472a03567" (UID: "38fb36bd-4737-4f46-a15a-ae6472a03567"). InnerVolumeSpecName "kube-api-access-4r5wc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.258069 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-scripts" (OuterVolumeSpecName: "scripts") pod "38fb36bd-4737-4f46-a15a-ae6472a03567" (UID: "38fb36bd-4737-4f46-a15a-ae6472a03567"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.280984 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-config-data" (OuterVolumeSpecName: "config-data") pod "38fb36bd-4737-4f46-a15a-ae6472a03567" (UID: "38fb36bd-4737-4f46-a15a-ae6472a03567"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.291506 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38fb36bd-4737-4f46-a15a-ae6472a03567" (UID: "38fb36bd-4737-4f46-a15a-ae6472a03567"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.350301 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.350341 4753 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.350350 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.350358 4753 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.350369 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38fb36bd-4737-4f46-a15a-ae6472a03567-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.350377 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r5wc\" (UniqueName: \"kubernetes.io/projected/38fb36bd-4737-4f46-a15a-ae6472a03567-kube-api-access-4r5wc\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.647063 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-bootstrap-kqr6s" event={"ID":"38fb36bd-4737-4f46-a15a-ae6472a03567","Type":"ContainerDied","Data":"4ea6dd043e596340f8a2c08a7e4d531ea16bf9898bb2970cb33b5f0f6c9b59ee"} Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.647108 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4ea6dd043e596340f8a2c08a7e4d531ea16bf9898bb2970cb33b5f0f6c9b59ee" Dec 05 17:33:59 crc kubenswrapper[4753]: I1205 17:33:59.647188 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-kqr6s" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.306042 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-kqr6s"] Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.316941 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-kqr6s"] Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.397026 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-klc67"] Dec 05 17:34:00 crc kubenswrapper[4753]: E1205 17:34:00.397550 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" containerName="init" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.397569 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" containerName="init" Dec 05 17:34:00 crc kubenswrapper[4753]: E1205 17:34:00.397579 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d9e3a47-4d44-490a-bfa0-34d0e0e68373" containerName="init" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.397584 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d9e3a47-4d44-490a-bfa0-34d0e0e68373" containerName="init" Dec 05 17:34:00 crc kubenswrapper[4753]: E1205 17:34:00.397603 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38fb36bd-4737-4f46-a15a-ae6472a03567" containerName="keystone-bootstrap" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.397610 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="38fb36bd-4737-4f46-a15a-ae6472a03567" containerName="keystone-bootstrap" Dec 05 17:34:00 crc kubenswrapper[4753]: E1205 17:34:00.397622 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" containerName="dnsmasq-dns" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.397629 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" containerName="dnsmasq-dns" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.397822 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a7a89d0-87ed-4add-a8ba-0e0bd9fd22a0" containerName="dnsmasq-dns" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.397835 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d9e3a47-4d44-490a-bfa0-34d0e0e68373" containerName="init" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.397851 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="38fb36bd-4737-4f46-a15a-ae6472a03567" containerName="keystone-bootstrap" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.398787 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.402182 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.402572 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jfr42" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.402745 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.402903 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.415636 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-klc67"] Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.473229 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-fernet-keys\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.473616 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-config-data\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.473741 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-credential-keys\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.473948 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-combined-ca-bundle\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.474019 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5xr5\" (UniqueName: \"kubernetes.io/projected/5d74be7c-896f-4771-8d81-293a02d24cd5-kube-api-access-s5xr5\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.474061 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-scripts\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.576065 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-config-data\") pod \"keystone-bootstrap-klc67\" (UID: 
\"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.576190 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-credential-keys\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.576277 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-combined-ca-bundle\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.576344 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5xr5\" (UniqueName: \"kubernetes.io/projected/5d74be7c-896f-4771-8d81-293a02d24cd5-kube-api-access-s5xr5\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.576393 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-scripts\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.576472 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-fernet-keys\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.585062 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-scripts\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.585233 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-combined-ca-bundle\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.586677 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-config-data\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.587097 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-credential-keys\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.595378 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-s5xr5\" (UniqueName: \"kubernetes.io/projected/5d74be7c-896f-4771-8d81-293a02d24cd5-kube-api-access-s5xr5\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.606471 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-fernet-keys\") pod \"keystone-bootstrap-klc67\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:00 crc kubenswrapper[4753]: I1205 17:34:00.720864 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:01 crc kubenswrapper[4753]: E1205 17:34:01.700079 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice\": RecentStats: unable to find data in memory cache], [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc44e1a93_e233_46a2_b18a_e6c8c396a394.slice/crio-conmon-7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice/crio-2c37b039ede04a74c28a7b3b237b8ffc9042ad09c0aa4c14d4a4581c2a6c11af\": RecentStats: unable to find data in memory cache]" Dec 05 17:34:01 crc kubenswrapper[4753]: I1205 17:34:01.738271 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38fb36bd-4737-4f46-a15a-ae6472a03567" path="/var/lib/kubelet/pods/38fb36bd-4737-4f46-a15a-ae6472a03567/volumes" Dec 05 17:34:03 crc kubenswrapper[4753]: I1205 17:34:03.395437 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" Dec 05 17:34:03 crc kubenswrapper[4753]: I1205 17:34:03.475692 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-562p5"] Dec 05 17:34:03 crc kubenswrapper[4753]: I1205 17:34:03.480959 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerName="dnsmasq-dns" containerID="cri-o://bdeccf61f1b07b7d8bf5078dca6eb5383f15e0b0a2409c978233e3988824a823" gracePeriod=10 Dec 05 17:34:04 crc kubenswrapper[4753]: I1205 17:34:04.741776 4753 generic.go:334] "Generic (PLEG): container finished" podID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerID="bdeccf61f1b07b7d8bf5078dca6eb5383f15e0b0a2409c978233e3988824a823" exitCode=0 Dec 05 17:34:04 crc kubenswrapper[4753]: I1205 17:34:04.741862 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" event={"ID":"e1e33c8a-695c-42cb-9e55-3a5346413faf","Type":"ContainerDied","Data":"bdeccf61f1b07b7d8bf5078dca6eb5383f15e0b0a2409c978233e3988824a823"} Dec 05 17:34:07 crc kubenswrapper[4753]: I1205 17:34:07.333743 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 
10.217.0.137:5353: connect: connection refused" Dec 05 17:34:08 crc kubenswrapper[4753]: E1205 17:34:08.689018 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Dec 05 17:34:08 crc kubenswrapper[4753]: E1205 17:34:08.689561 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n84h544h59bh565h5bbh545h559h85h54bh8bh7fhcdh65bh546h75h589hdfh54bh65h68chb6hc4h86h567h567h68bh56h584h94h644h569h699q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7klpl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(cffca222-0336-40c8-886f-247667322702): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:34:08 crc kubenswrapper[4753]: I1205 17:34:08.721048 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:34:08 crc kubenswrapper[4753]: E1205 17:34:08.721306 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:34:08 crc kubenswrapper[4753]: I1205 17:34:08.790879 4753 generic.go:334] "Generic (PLEG): container finished" podID="98474d5b-9d55-4704-a02c-1fd4ccf3f2f9" containerID="ebd79e80aaf3fe4c3fc1df9b6c3f177dca356f7f9bf9f677ad32e21e54bff97b" exitCode=0 Dec 05 17:34:08 crc kubenswrapper[4753]: I1205 17:34:08.790925 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jzgbr" event={"ID":"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9","Type":"ContainerDied","Data":"ebd79e80aaf3fe4c3fc1df9b6c3f177dca356f7f9bf9f677ad32e21e54bff97b"} Dec 05 17:34:11 crc kubenswrapper[4753]: E1205 17:34:11.995891 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice/crio-2c37b039ede04a74c28a7b3b237b8ffc9042ad09c0aa4c14d4a4581c2a6c11af\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod00d86d0d_6896_435e_808f_eec9c8225f99.slice\": RecentStats: unable to find data in memory cache], [\"/system.slice/rpm-ostreed.service\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc44e1a93_e233_46a2_b18a_e6c8c396a394.slice/crio-conmon-7eb464d7df6e8d1abc098e2c5cdf2192069287ec1fec6846c7a1b7a928c9a1eb.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:34:12 crc kubenswrapper[4753]: I1205 17:34:12.334237 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.137:5353: connect: connection refused" Dec 05 17:34:15 crc kubenswrapper[4753]: I1205 17:34:15.865357 4753 generic.go:334] "Generic (PLEG): container finished" podID="bbdd764d-13d3-4597-a938-04eeb490a8ba" containerID="98ee6cc5d087d5642253f4516ee41bb7601dc040eef2c2f7656a06a05f43b060" exitCode=0 Dec 05 17:34:15 crc kubenswrapper[4753]: I1205 17:34:15.865451 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rwqhq" event={"ID":"bbdd764d-13d3-4597-a938-04eeb490a8ba","Type":"ContainerDied","Data":"98ee6cc5d087d5642253f4516ee41bb7601dc040eef2c2f7656a06a05f43b060"} Dec 05 17:34:17 crc kubenswrapper[4753]: I1205 17:34:17.333266 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.137:5353: connect: connection refused" Dec 05 17:34:17 crc kubenswrapper[4753]: I1205 17:34:17.333647 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:34:18 crc kubenswrapper[4753]: E1205 17:34:18.249424 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 05 17:34:18 crc kubenswrapper[4753]: E1205 17:34:18.249577 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t57k6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-f6c6z_openstack(68a4a494-d4ff-43ee-a74c-4f0377d229ec): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:34:18 crc kubenswrapper[4753]: E1205 17:34:18.250796 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-f6c6z" podUID="68a4a494-d4ff-43ee-a74c-4f0377d229ec" Dec 05 17:34:18 crc kubenswrapper[4753]: E1205 17:34:18.646687 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 05 17:34:18 crc kubenswrapper[4753]: E1205 17:34:18.646844 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zc7jr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-l429g_openstack(562378ad-0bd6-4143-bc1e-331ec844e38f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:34:18 crc kubenswrapper[4753]: E1205 17:34:18.648516 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-l429g" podUID="562378ad-0bd6-4143-bc1e-331ec844e38f" Dec 05 17:34:18 crc kubenswrapper[4753]: E1205 17:34:18.899965 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-l429g" podUID="562378ad-0bd6-4143-bc1e-331ec844e38f" Dec 05 17:34:18 crc kubenswrapper[4753]: E1205 17:34:18.900385 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-f6c6z" podUID="68a4a494-d4ff-43ee-a74c-4f0377d229ec" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.247488 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jzgbr" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.255473 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.270087 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387072 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-db-sync-config-data\") pod \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387121 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lflvw\" (UniqueName: \"kubernetes.io/projected/bbdd764d-13d3-4597-a938-04eeb490a8ba-kube-api-access-lflvw\") pod \"bbdd764d-13d3-4597-a938-04eeb490a8ba\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387225 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w8vw\" (UniqueName: \"kubernetes.io/projected/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-kube-api-access-4w8vw\") pod \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387294 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-config\") pod \"bbdd764d-13d3-4597-a938-04eeb490a8ba\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387333 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-combined-ca-bundle\") pod \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387370 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-combined-ca-bundle\") pod \"bbdd764d-13d3-4597-a938-04eeb490a8ba\" (UID: \"bbdd764d-13d3-4597-a938-04eeb490a8ba\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387405 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-dns-svc\") pod \"e1e33c8a-695c-42cb-9e55-3a5346413faf\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387429 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-config\") pod \"e1e33c8a-695c-42cb-9e55-3a5346413faf\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387456 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8m8s6\" (UniqueName: \"kubernetes.io/projected/e1e33c8a-695c-42cb-9e55-3a5346413faf-kube-api-access-8m8s6\") pod \"e1e33c8a-695c-42cb-9e55-3a5346413faf\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.387550 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-sb\") pod 
\"e1e33c8a-695c-42cb-9e55-3a5346413faf\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.388816 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-config-data\") pod \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\" (UID: \"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.388866 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-nb\") pod \"e1e33c8a-695c-42cb-9e55-3a5346413faf\" (UID: \"e1e33c8a-695c-42cb-9e55-3a5346413faf\") " Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.393987 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bbdd764d-13d3-4597-a938-04eeb490a8ba-kube-api-access-lflvw" (OuterVolumeSpecName: "kube-api-access-lflvw") pod "bbdd764d-13d3-4597-a938-04eeb490a8ba" (UID: "bbdd764d-13d3-4597-a938-04eeb490a8ba"). InnerVolumeSpecName "kube-api-access-lflvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.395507 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "98474d5b-9d55-4704-a02c-1fd4ccf3f2f9" (UID: "98474d5b-9d55-4704-a02c-1fd4ccf3f2f9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.395634 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-kube-api-access-4w8vw" (OuterVolumeSpecName: "kube-api-access-4w8vw") pod "98474d5b-9d55-4704-a02c-1fd4ccf3f2f9" (UID: "98474d5b-9d55-4704-a02c-1fd4ccf3f2f9"). InnerVolumeSpecName "kube-api-access-4w8vw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.411495 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1e33c8a-695c-42cb-9e55-3a5346413faf-kube-api-access-8m8s6" (OuterVolumeSpecName: "kube-api-access-8m8s6") pod "e1e33c8a-695c-42cb-9e55-3a5346413faf" (UID: "e1e33c8a-695c-42cb-9e55-3a5346413faf"). InnerVolumeSpecName "kube-api-access-8m8s6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.431585 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bbdd764d-13d3-4597-a938-04eeb490a8ba" (UID: "bbdd764d-13d3-4597-a938-04eeb490a8ba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.434660 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-config" (OuterVolumeSpecName: "config") pod "bbdd764d-13d3-4597-a938-04eeb490a8ba" (UID: "bbdd764d-13d3-4597-a938-04eeb490a8ba"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.437734 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e1e33c8a-695c-42cb-9e55-3a5346413faf" (UID: "e1e33c8a-695c-42cb-9e55-3a5346413faf"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.440272 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "98474d5b-9d55-4704-a02c-1fd4ccf3f2f9" (UID: "98474d5b-9d55-4704-a02c-1fd4ccf3f2f9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.443092 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e1e33c8a-695c-42cb-9e55-3a5346413faf" (UID: "e1e33c8a-695c-42cb-9e55-3a5346413faf"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.446701 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-config" (OuterVolumeSpecName: "config") pod "e1e33c8a-695c-42cb-9e55-3a5346413faf" (UID: "e1e33c8a-695c-42cb-9e55-3a5346413faf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.452856 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-config-data" (OuterVolumeSpecName: "config-data") pod "98474d5b-9d55-4704-a02c-1fd4ccf3f2f9" (UID: "98474d5b-9d55-4704-a02c-1fd4ccf3f2f9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.459921 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e1e33c8a-695c-42cb-9e55-3a5346413faf" (UID: "e1e33c8a-695c-42cb-9e55-3a5346413faf"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.490974 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491013 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491026 4753 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491036 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lflvw\" (UniqueName: \"kubernetes.io/projected/bbdd764d-13d3-4597-a938-04eeb490a8ba-kube-api-access-lflvw\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491057 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w8vw\" (UniqueName: \"kubernetes.io/projected/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-kube-api-access-4w8vw\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491067 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491076 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491085 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bbdd764d-13d3-4597-a938-04eeb490a8ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491094 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491102 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491111 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8m8s6\" (UniqueName: \"kubernetes.io/projected/e1e33c8a-695c-42cb-9e55-3a5346413faf-kube-api-access-8m8s6\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.491118 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e1e33c8a-695c-42cb-9e55-3a5346413faf-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.911201 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-rwqhq" 
event={"ID":"bbdd764d-13d3-4597-a938-04eeb490a8ba","Type":"ContainerDied","Data":"78985ab4be678acc7e6927fb3eb5e300d6d976cdeaddfafe2f74d80d12a0b313"} Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.911244 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78985ab4be678acc7e6927fb3eb5e300d6d976cdeaddfafe2f74d80d12a0b313" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.911306 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-rwqhq" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.914543 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-jzgbr" event={"ID":"98474d5b-9d55-4704-a02c-1fd4ccf3f2f9","Type":"ContainerDied","Data":"1c1ec996a85df84ba6642d091720a36d28b35bef1ee9cc80161eee9099701482"} Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.914567 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c1ec996a85df84ba6642d091720a36d28b35bef1ee9cc80161eee9099701482" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.914607 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-jzgbr" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.918840 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" event={"ID":"e1e33c8a-695c-42cb-9e55-3a5346413faf","Type":"ContainerDied","Data":"290df3fb7de1a514cae5164a20d8ef202184b5688a6ea3bf49aa7ad9dbf892bb"} Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.918881 4753 scope.go:117] "RemoveContainer" containerID="bdeccf61f1b07b7d8bf5078dca6eb5383f15e0b0a2409c978233e3988824a823" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.919103 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-562p5" Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.956998 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-562p5"] Dec 05 17:34:19 crc kubenswrapper[4753]: I1205 17:34:19.966278 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-562p5"] Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.530209 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-f76lv"] Dec 05 17:34:20 crc kubenswrapper[4753]: E1205 17:34:20.530661 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bbdd764d-13d3-4597-a938-04eeb490a8ba" containerName="neutron-db-sync" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.530674 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="bbdd764d-13d3-4597-a938-04eeb490a8ba" containerName="neutron-db-sync" Dec 05 17:34:20 crc kubenswrapper[4753]: E1205 17:34:20.530696 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98474d5b-9d55-4704-a02c-1fd4ccf3f2f9" containerName="glance-db-sync" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.530705 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="98474d5b-9d55-4704-a02c-1fd4ccf3f2f9" containerName="glance-db-sync" Dec 05 17:34:20 crc kubenswrapper[4753]: E1205 17:34:20.530727 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerName="init" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.530736 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerName="init" Dec 05 17:34:20 crc kubenswrapper[4753]: E1205 17:34:20.530753 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerName="dnsmasq-dns" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.530761 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerName="dnsmasq-dns" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.530959 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="98474d5b-9d55-4704-a02c-1fd4ccf3f2f9" containerName="glance-db-sync" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.530976 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" containerName="dnsmasq-dns" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.531029 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="bbdd764d-13d3-4597-a938-04eeb490a8ba" containerName="neutron-db-sync" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.532191 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.537311 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-f76lv"] Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.639257 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-c57d65f96-d2bw4"] Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.641131 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.647881 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.648076 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.648252 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.648412 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-px6hm" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.658596 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-c57d65f96-d2bw4"] Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.719629 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-swift-storage-0\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.719858 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-svc\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.719939 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-nb\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.720033 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnbv5\" (UniqueName: \"kubernetes.io/projected/0acfa75f-8372-4ccb-a91a-6741854cdd34-kube-api-access-vnbv5\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.720427 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-sb\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.720686 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-config\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.749898 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-f76lv"] Dec 05 17:34:20 crc kubenswrapper[4753]: E1205 
17:34:20.753024 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[config dns-svc dns-swift-storage-0 kube-api-access-vnbv5 ovsdbserver-nb ovsdbserver-sb], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-6664c6795f-f76lv" podUID="0acfa75f-8372-4ccb-a91a-6741854cdd34" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.810546 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-stl2q"] Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.821547 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824551 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-config\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824621 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-ovndb-tls-certs\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824691 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-swift-storage-0\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824706 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-httpd-config\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824754 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-svc\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824775 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-nb\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824809 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-combined-ca-bundle\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824832 4753 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-vnbv5\" (UniqueName: \"kubernetes.io/projected/0acfa75f-8372-4ccb-a91a-6741854cdd34-kube-api-access-vnbv5\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824853 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rllgw\" (UniqueName: \"kubernetes.io/projected/c69accdb-191f-4ea8-905c-308492e5c663-kube-api-access-rllgw\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824964 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-sb\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.824981 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-config\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.827707 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-config\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.828507 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-swift-storage-0\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.829696 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-svc\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.830039 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-sb\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.830083 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-nb\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.837052 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-stl2q"] Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 
17:34:20.882000 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnbv5\" (UniqueName: \"kubernetes.io/projected/0acfa75f-8372-4ccb-a91a-6741854cdd34-kube-api-access-vnbv5\") pod \"dnsmasq-dns-6664c6795f-f76lv\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.928853 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-config\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.928967 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-config\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.929002 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4r57\" (UniqueName: \"kubernetes.io/projected/7fa187c8-5ebc-4112-8551-0c551b537f32-kube-api-access-v4r57\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.929030 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.929056 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-ovndb-tls-certs\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.929105 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-httpd-config\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.929174 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.929199 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-combined-ca-bundle\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.929228 4753 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-rllgw\" (UniqueName: \"kubernetes.io/projected/c69accdb-191f-4ea8-905c-308492e5c663-kube-api-access-rllgw\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.929273 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.929295 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.934439 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-ovndb-tls-certs\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.934509 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.935540 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-config\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.936909 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-httpd-config\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.944107 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-combined-ca-bundle\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.962845 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rllgw\" (UniqueName: \"kubernetes.io/projected/c69accdb-191f-4ea8-905c-308492e5c663-kube-api-access-rllgw\") pod \"neutron-c57d65f96-d2bw4\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:20 crc kubenswrapper[4753]: I1205 17:34:20.974820 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.032638 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.032702 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.032808 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-config\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.032849 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4r57\" (UniqueName: \"kubernetes.io/projected/7fa187c8-5ebc-4112-8551-0c551b537f32-kube-api-access-v4r57\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.032881 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.032963 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.034570 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-svc\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.038465 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-swift-storage-0\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.039171 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.042622 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-sb\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.042664 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-nb\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.042631 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-config\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.053134 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4r57\" (UniqueName: \"kubernetes.io/projected/7fa187c8-5ebc-4112-8551-0c551b537f32-kube-api-access-v4r57\") pod \"dnsmasq-dns-5ccc5c4795-stl2q\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.137041 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-swift-storage-0\") pod \"0acfa75f-8372-4ccb-a91a-6741854cdd34\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.137127 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-sb\") pod \"0acfa75f-8372-4ccb-a91a-6741854cdd34\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.137240 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-config\") pod \"0acfa75f-8372-4ccb-a91a-6741854cdd34\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.137393 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-svc\") pod \"0acfa75f-8372-4ccb-a91a-6741854cdd34\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.137419 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-nb\") pod \"0acfa75f-8372-4ccb-a91a-6741854cdd34\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.137441 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnbv5\" (UniqueName: 
\"kubernetes.io/projected/0acfa75f-8372-4ccb-a91a-6741854cdd34-kube-api-access-vnbv5\") pod \"0acfa75f-8372-4ccb-a91a-6741854cdd34\" (UID: \"0acfa75f-8372-4ccb-a91a-6741854cdd34\") " Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.137644 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0acfa75f-8372-4ccb-a91a-6741854cdd34" (UID: "0acfa75f-8372-4ccb-a91a-6741854cdd34"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.137658 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0acfa75f-8372-4ccb-a91a-6741854cdd34" (UID: "0acfa75f-8372-4ccb-a91a-6741854cdd34"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.137999 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.138018 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.138267 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0acfa75f-8372-4ccb-a91a-6741854cdd34" (UID: "0acfa75f-8372-4ccb-a91a-6741854cdd34"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.138417 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-config" (OuterVolumeSpecName: "config") pod "0acfa75f-8372-4ccb-a91a-6741854cdd34" (UID: "0acfa75f-8372-4ccb-a91a-6741854cdd34"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.138589 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0acfa75f-8372-4ccb-a91a-6741854cdd34" (UID: "0acfa75f-8372-4ccb-a91a-6741854cdd34"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.142478 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0acfa75f-8372-4ccb-a91a-6741854cdd34-kube-api-access-vnbv5" (OuterVolumeSpecName: "kube-api-access-vnbv5") pod "0acfa75f-8372-4ccb-a91a-6741854cdd34" (UID: "0acfa75f-8372-4ccb-a91a-6741854cdd34"). InnerVolumeSpecName "kube-api-access-vnbv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.237532 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.239140 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.239183 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.239198 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0acfa75f-8372-4ccb-a91a-6741854cdd34-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.239212 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnbv5\" (UniqueName: \"kubernetes.io/projected/0acfa75f-8372-4ccb-a91a-6741854cdd34-kube-api-access-vnbv5\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.743216 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1e33c8a-695c-42cb-9e55-3a5346413faf" path="/var/lib/kubelet/pods/e1e33c8a-695c-42cb-9e55-3a5346413faf/volumes" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.802062 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.838839 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.852693 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.857213 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-x2mdw" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.857699 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.858899 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.914079 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.916516 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.919291 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.927758 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.947192 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6664c6795f-f76lv" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.954624 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.954738 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-config-data\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.954945 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-scripts\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.955079 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-72hnh\" (UniqueName: \"kubernetes.io/projected/ab469bdf-ba51-42a6-9f6b-ce772e850050-kube-api-access-72hnh\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.955202 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.955250 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:21 crc kubenswrapper[4753]: I1205 17:34:21.955350 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-logs\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.006932 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-f76lv"] Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.025635 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6664c6795f-f76lv"] Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.056501 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-logs\") pod 
\"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.056564 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.056665 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.056720 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-config-data\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.056766 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbfn9\" (UniqueName: \"kubernetes.io/projected/70451c95-c6a0-474c-8707-3cb683987428-kube-api-access-wbfn9\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.056786 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.056808 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-scripts\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.056830 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-scripts\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.056982 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-72hnh\" (UniqueName: \"kubernetes.io/projected/ab469bdf-ba51-42a6-9f6b-ce772e850050-kube-api-access-72hnh\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.057103 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-config-data\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.057263 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.057306 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.057365 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.057507 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-logs\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.058226 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-logs\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.062882 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-config-data\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.063056 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-scripts\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.063554 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.071683 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.071741 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0a0f782b6c9e85c3ca02611e1c19370b05b7adf40a84b1c1fba977879932e0cc/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.075208 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.084779 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-72hnh\" (UniqueName: \"kubernetes.io/projected/ab469bdf-ba51-42a6-9f6b-ce772e850050-kube-api-access-72hnh\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.136413 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.160794 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-config-data\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.160874 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.160977 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-logs\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.161025 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.161134 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbfn9\" (UniqueName: 
\"kubernetes.io/projected/70451c95-c6a0-474c-8707-3cb683987428-kube-api-access-wbfn9\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.161335 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.161369 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-scripts\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.161700 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-logs\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.164362 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.165780 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.166055 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/df7f219cf3246d8ce07ad766614ffac20d7c8f72baddde817ee73d8a655238aa/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.169465 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-config-data\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.170238 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-scripts\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.175840 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.183662 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.196845 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbfn9\" (UniqueName: \"kubernetes.io/projected/70451c95-c6a0-474c-8707-3cb683987428-kube-api-access-wbfn9\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.219316 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.241030 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:22 crc kubenswrapper[4753]: E1205 17:34:22.263377 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0acfa75f_8372_4ccb_a91a_6741854cdd34.slice\": RecentStats: unable to find data in memory cache]" Dec 05 17:34:22 crc kubenswrapper[4753]: I1205 17:34:22.720611 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:34:22 crc kubenswrapper[4753]: E1205 17:34:22.721056 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:34:23 crc kubenswrapper[4753]: I1205 17:34:23.746317 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0acfa75f-8372-4ccb-a91a-6741854cdd34" path="/var/lib/kubelet/pods/0acfa75f-8372-4ccb-a91a-6741854cdd34/volumes" Dec 05 17:34:25 crc kubenswrapper[4753]: I1205 17:34:25.770592 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:34:25 crc kubenswrapper[4753]: I1205 17:34:25.854384 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:34:25 crc kubenswrapper[4753]: I1205 17:34:25.888663 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-klc67"] Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.499978 4753 scope.go:117] "RemoveContainer" containerID="e3bfbefa10648c8108c78f4e3459c915a9f09cb2d3e9165e64dc7351820ef803" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.511023 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6d4f9bcbff-wqwgp"] Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.514625 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.517566 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.521487 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.539940 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d4f9bcbff-wqwgp"] Dec 05 17:34:26 crc kubenswrapper[4753]: E1205 17:34:26.548137 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current" Dec 05 17:34:26 crc kubenswrapper[4753]: E1205 17:34:26.548220 4753 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current" Dec 05 17:34:26 crc kubenswrapper[4753]: E1205 17:34:26.548380 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t2db6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerRe
sizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-9vsjw_openstack(f5711686-6b64-450f-b2b4-6583dab08275): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:34:26 crc kubenswrapper[4753]: E1205 17:34:26.549524 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cloudkitty-db-sync-9vsjw" podUID="f5711686-6b64-450f-b2b4-6583dab08275" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.560513 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-config\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.560871 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tjg9\" (UniqueName: \"kubernetes.io/projected/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-kube-api-access-6tjg9\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.561034 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-httpd-config\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.561168 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-public-tls-certs\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.561361 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-internal-tls-certs\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.561451 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-combined-ca-bundle\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.561537 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-ovndb-tls-certs\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: W1205 17:34:26.575688 4753 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5d74be7c_896f_4771_8d81_293a02d24cd5.slice/crio-1eafc4f5615c5942ef5544b5628955f14da54dfc0bf8fc073455283ceec5afcf WatchSource:0}: Error finding container 1eafc4f5615c5942ef5544b5628955f14da54dfc0bf8fc073455283ceec5afcf: Status 404 returned error can't find the container with id 1eafc4f5615c5942ef5544b5628955f14da54dfc0bf8fc073455283ceec5afcf Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.667661 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-internal-tls-certs\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.667720 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-combined-ca-bundle\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.667760 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-ovndb-tls-certs\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.667800 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-config\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.667851 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tjg9\" (UniqueName: \"kubernetes.io/projected/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-kube-api-access-6tjg9\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.667895 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-httpd-config\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.667942 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-public-tls-certs\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.676010 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-config\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.683407 4753 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-ovndb-tls-certs\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.684028 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-combined-ca-bundle\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.684256 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-httpd-config\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.685025 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-public-tls-certs\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.685650 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-internal-tls-certs\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.701810 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tjg9\" (UniqueName: \"kubernetes.io/projected/a2d7ec08-5ff8-4470-a4a7-2d830d3f5261-kube-api-access-6tjg9\") pod \"neutron-6d4f9bcbff-wqwgp\" (UID: \"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261\") " pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:26 crc kubenswrapper[4753]: I1205 17:34:26.792253 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:27 crc kubenswrapper[4753]: I1205 17:34:27.012947 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-klc67" event={"ID":"5d74be7c-896f-4771-8d81-293a02d24cd5","Type":"ContainerStarted","Data":"1eafc4f5615c5942ef5544b5628955f14da54dfc0bf8fc073455283ceec5afcf"} Dec 05 17:34:27 crc kubenswrapper[4753]: E1205 17:34:27.028208 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current\\\"\"" pod="openstack/cloudkitty-db-sync-9vsjw" podUID="f5711686-6b64-450f-b2b4-6583dab08275" Dec 05 17:34:27 crc kubenswrapper[4753]: I1205 17:34:27.399284 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-c57d65f96-d2bw4"] Dec 05 17:34:27 crc kubenswrapper[4753]: I1205 17:34:27.482852 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-stl2q"] Dec 05 17:34:27 crc kubenswrapper[4753]: I1205 17:34:27.661342 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d4f9bcbff-wqwgp"] Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.034048 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cffca222-0336-40c8-886f-247667322702","Type":"ContainerStarted","Data":"c5c0a4fafd3b40829436466d86fa7132ec845878079668fc5f2fb5a78a8c4968"} Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.036012 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-klc67" event={"ID":"5d74be7c-896f-4771-8d81-293a02d24cd5","Type":"ContainerStarted","Data":"62ed0226dbfaa3d50e7bb491688c2ee289e0378607a353b658f6c874aced0309"} Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.039426 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" event={"ID":"7fa187c8-5ebc-4112-8551-0c551b537f32","Type":"ContainerStarted","Data":"434a79eef47c833a3a354e08ba2c127bb4e15e66591a9431f2ca4e00c573db17"} Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.041656 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d4f9bcbff-wqwgp" event={"ID":"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261","Type":"ContainerStarted","Data":"e01b38437e09d2fcd27fffac841171f9ccdead4615197d69ad136fb672e7f725"} Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.048866 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-d47nm" event={"ID":"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4","Type":"ContainerStarted","Data":"055bb8811e611e031ed0d48db68708c72de592577aab13446925fce025eb4e8d"} Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.051239 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c57d65f96-d2bw4" event={"ID":"c69accdb-191f-4ea8-905c-308492e5c663","Type":"ContainerStarted","Data":"cc202ebae752a8f9147c2a29aa88d72437cad32f7fa4a5d208c3217417e57035"} Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.058829 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-klc67" podStartSLOduration=28.058814016 podStartE2EDuration="28.058814016s" podCreationTimestamp="2025-12-05 17:34:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 
17:34:28.057049856 +0000 UTC m=+1806.560156862" watchObservedRunningTime="2025-12-05 17:34:28.058814016 +0000 UTC m=+1806.561921022" Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.080637 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-d47nm" podStartSLOduration=7.803518217 podStartE2EDuration="41.080620873s" podCreationTimestamp="2025-12-05 17:33:47 +0000 UTC" firstStartedPulling="2025-12-05 17:33:49.645725313 +0000 UTC m=+1768.148832319" lastFinishedPulling="2025-12-05 17:34:22.922827969 +0000 UTC m=+1801.425934975" observedRunningTime="2025-12-05 17:34:28.077294839 +0000 UTC m=+1806.580401845" watchObservedRunningTime="2025-12-05 17:34:28.080620873 +0000 UTC m=+1806.583727879" Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.249716 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:34:28 crc kubenswrapper[4753]: W1205 17:34:28.256118 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podab469bdf_ba51_42a6_9f6b_ce772e850050.slice/crio-c94e6d519089765bc2bd40d784cb01880eab4ea0458332f888f85ab3e504d003 WatchSource:0}: Error finding container c94e6d519089765bc2bd40d784cb01880eab4ea0458332f888f85ab3e504d003: Status 404 returned error can't find the container with id c94e6d519089765bc2bd40d784cb01880eab4ea0458332f888f85ab3e504d003 Dec 05 17:34:28 crc kubenswrapper[4753]: I1205 17:34:28.942619 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.064242 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab469bdf-ba51-42a6-9f6b-ce772e850050","Type":"ContainerStarted","Data":"03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf"} Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.064547 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab469bdf-ba51-42a6-9f6b-ce772e850050","Type":"ContainerStarted","Data":"c94e6d519089765bc2bd40d784cb01880eab4ea0458332f888f85ab3e504d003"} Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.066010 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70451c95-c6a0-474c-8707-3cb683987428","Type":"ContainerStarted","Data":"d12e9bb1a0a8e43894a8abe1e1e742ef508f672d39826822e2b8832ab2969779"} Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.072058 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c57d65f96-d2bw4" event={"ID":"c69accdb-191f-4ea8-905c-308492e5c663","Type":"ContainerStarted","Data":"c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69"} Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.072109 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c57d65f96-d2bw4" event={"ID":"c69accdb-191f-4ea8-905c-308492e5c663","Type":"ContainerStarted","Data":"1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6"} Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.072197 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.078605 4753 generic.go:334] "Generic (PLEG): container finished" podID="7fa187c8-5ebc-4112-8551-0c551b537f32" 
containerID="ec73a62712b61d6145fd1b2dae64e66805d2154337e603668aea42d454749f99" exitCode=0 Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.078709 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" event={"ID":"7fa187c8-5ebc-4112-8551-0c551b537f32","Type":"ContainerDied","Data":"ec73a62712b61d6145fd1b2dae64e66805d2154337e603668aea42d454749f99"} Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.087100 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d4f9bcbff-wqwgp" event={"ID":"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261","Type":"ContainerStarted","Data":"f5f94c4bfe5db79c18e1767c1f511b4a9909c6647bd5b8879e9c6d54009a8378"} Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.087491 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d4f9bcbff-wqwgp" event={"ID":"a2d7ec08-5ff8-4470-a4a7-2d830d3f5261","Type":"ContainerStarted","Data":"9a374ac6e25b43222b18f55e4a0d888ac918977bd493bd7a0c883c27f963d639"} Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.088228 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.110106 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-c57d65f96-d2bw4" podStartSLOduration=9.110089707 podStartE2EDuration="9.110089707s" podCreationTimestamp="2025-12-05 17:34:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:29.101403521 +0000 UTC m=+1807.604510527" watchObservedRunningTime="2025-12-05 17:34:29.110089707 +0000 UTC m=+1807.613196713" Dec 05 17:34:29 crc kubenswrapper[4753]: I1205 17:34:29.143587 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6d4f9bcbff-wqwgp" podStartSLOduration=3.143559745 podStartE2EDuration="3.143559745s" podCreationTimestamp="2025-12-05 17:34:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:29.135397574 +0000 UTC m=+1807.638504580" watchObservedRunningTime="2025-12-05 17:34:29.143559745 +0000 UTC m=+1807.646666751" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.110928 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab469bdf-ba51-42a6-9f6b-ce772e850050","Type":"ContainerStarted","Data":"1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38"} Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.111011 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerName="glance-log" containerID="cri-o://03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf" gracePeriod=30 Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.111319 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerName="glance-httpd" containerID="cri-o://1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38" gracePeriod=30 Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.117116 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"70451c95-c6a0-474c-8707-3cb683987428","Type":"ContainerStarted","Data":"ec434f44447fcb1eb98dc000dff5ec92c6bab5e2f8fb624c8690344db0754130"} Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.121777 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" event={"ID":"7fa187c8-5ebc-4112-8551-0c551b537f32","Type":"ContainerStarted","Data":"2ff602be5fec339579546a2175b8050cc98dfdb05b8bd83a288cd0835e0ade94"} Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.123104 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.133844 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-d47nm" event={"ID":"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4","Type":"ContainerDied","Data":"055bb8811e611e031ed0d48db68708c72de592577aab13446925fce025eb4e8d"} Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.134523 4753 generic.go:334] "Generic (PLEG): container finished" podID="c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" containerID="055bb8811e611e031ed0d48db68708c72de592577aab13446925fce025eb4e8d" exitCode=0 Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.154928 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=10.154906285 podStartE2EDuration="10.154906285s" podCreationTimestamp="2025-12-05 17:34:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:30.133368055 +0000 UTC m=+1808.636475081" watchObservedRunningTime="2025-12-05 17:34:30.154906285 +0000 UTC m=+1808.658013291" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.181582 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" podStartSLOduration=10.18156205 podStartE2EDuration="10.18156205s" podCreationTimestamp="2025-12-05 17:34:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:30.152349922 +0000 UTC m=+1808.655456928" watchObservedRunningTime="2025-12-05 17:34:30.18156205 +0000 UTC m=+1808.684669056" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.825261 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.954951 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-logs\") pod \"ab469bdf-ba51-42a6-9f6b-ce772e850050\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.955009 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-combined-ca-bundle\") pod \"ab469bdf-ba51-42a6-9f6b-ce772e850050\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.955204 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"ab469bdf-ba51-42a6-9f6b-ce772e850050\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.955317 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-config-data\") pod \"ab469bdf-ba51-42a6-9f6b-ce772e850050\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.955352 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-scripts\") pod \"ab469bdf-ba51-42a6-9f6b-ce772e850050\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.955383 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-httpd-run\") pod \"ab469bdf-ba51-42a6-9f6b-ce772e850050\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.955478 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-72hnh\" (UniqueName: \"kubernetes.io/projected/ab469bdf-ba51-42a6-9f6b-ce772e850050-kube-api-access-72hnh\") pod \"ab469bdf-ba51-42a6-9f6b-ce772e850050\" (UID: \"ab469bdf-ba51-42a6-9f6b-ce772e850050\") " Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.955644 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-logs" (OuterVolumeSpecName: "logs") pod "ab469bdf-ba51-42a6-9f6b-ce772e850050" (UID: "ab469bdf-ba51-42a6-9f6b-ce772e850050"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.955956 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ab469bdf-ba51-42a6-9f6b-ce772e850050" (UID: "ab469bdf-ba51-42a6-9f6b-ce772e850050"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.957252 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.957276 4753 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ab469bdf-ba51-42a6-9f6b-ce772e850050-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.962568 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-scripts" (OuterVolumeSpecName: "scripts") pod "ab469bdf-ba51-42a6-9f6b-ce772e850050" (UID: "ab469bdf-ba51-42a6-9f6b-ce772e850050"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.964300 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab469bdf-ba51-42a6-9f6b-ce772e850050-kube-api-access-72hnh" (OuterVolumeSpecName: "kube-api-access-72hnh") pod "ab469bdf-ba51-42a6-9f6b-ce772e850050" (UID: "ab469bdf-ba51-42a6-9f6b-ce772e850050"). InnerVolumeSpecName "kube-api-access-72hnh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.978489 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312" (OuterVolumeSpecName: "glance") pod "ab469bdf-ba51-42a6-9f6b-ce772e850050" (UID: "ab469bdf-ba51-42a6-9f6b-ce772e850050"). InnerVolumeSpecName "pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 17:34:30 crc kubenswrapper[4753]: I1205 17:34:30.992222 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ab469bdf-ba51-42a6-9f6b-ce772e850050" (UID: "ab469bdf-ba51-42a6-9f6b-ce772e850050"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.022629 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-config-data" (OuterVolumeSpecName: "config-data") pod "ab469bdf-ba51-42a6-9f6b-ce772e850050" (UID: "ab469bdf-ba51-42a6-9f6b-ce772e850050"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.060909 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.061268 4753 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") on node \"crc\" " Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.061371 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.061441 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ab469bdf-ba51-42a6-9f6b-ce772e850050-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.061510 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-72hnh\" (UniqueName: \"kubernetes.io/projected/ab469bdf-ba51-42a6-9f6b-ce772e850050-kube-api-access-72hnh\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.143721 4753 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.143970 4753 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312") on node "crc" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.167865 4753 reconciler_common.go:293] "Volume detached for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.179941 4753 generic.go:334] "Generic (PLEG): container finished" podID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerID="1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38" exitCode=143 Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.179986 4753 generic.go:334] "Generic (PLEG): container finished" podID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerID="03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf" exitCode=143 Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.180048 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab469bdf-ba51-42a6-9f6b-ce772e850050","Type":"ContainerDied","Data":"1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38"} Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.180074 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"ab469bdf-ba51-42a6-9f6b-ce772e850050","Type":"ContainerDied","Data":"03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf"} Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.180084 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"ab469bdf-ba51-42a6-9f6b-ce772e850050","Type":"ContainerDied","Data":"c94e6d519089765bc2bd40d784cb01880eab4ea0458332f888f85ab3e504d003"} Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.180100 4753 scope.go:117] "RemoveContainer" containerID="1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.180223 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.206538 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70451c95-c6a0-474c-8707-3cb683987428","Type":"ContainerStarted","Data":"bd77eba2adb2e511c59a5571dd3458348425bcd9eafd7164e32a3404b65229bf"} Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.208276 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="70451c95-c6a0-474c-8707-3cb683987428" containerName="glance-log" containerID="cri-o://ec434f44447fcb1eb98dc000dff5ec92c6bab5e2f8fb624c8690344db0754130" gracePeriod=30 Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.208959 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="70451c95-c6a0-474c-8707-3cb683987428" containerName="glance-httpd" containerID="cri-o://bd77eba2adb2e511c59a5571dd3458348425bcd9eafd7164e32a3404b65229bf" gracePeriod=30 Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.268018 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=11.267993737 podStartE2EDuration="11.267993737s" podCreationTimestamp="2025-12-05 17:34:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:31.259664351 +0000 UTC m=+1809.762771367" watchObservedRunningTime="2025-12-05 17:34:31.267993737 +0000 UTC m=+1809.771100743" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.300342 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.316198 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.323922 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:34:31 crc kubenswrapper[4753]: E1205 17:34:31.324401 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerName="glance-log" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.324414 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerName="glance-log" Dec 05 17:34:31 crc kubenswrapper[4753]: E1205 17:34:31.324443 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerName="glance-httpd" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.324449 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerName="glance-httpd" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.324631 4753 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerName="glance-httpd" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.324657 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab469bdf-ba51-42a6-9f6b-ce772e850050" containerName="glance-log" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.325712 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.332610 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.332871 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.342007 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.376535 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.376583 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.376618 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-logs\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.376668 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.376686 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-config-data\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.376712 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-scripts\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.376738 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqpfr\" (UniqueName: \"kubernetes.io/projected/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-kube-api-access-cqpfr\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.376810 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.481442 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.481858 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.481945 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-logs\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.482054 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.482123 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-config-data\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.482220 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-scripts\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.482311 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqpfr\" (UniqueName: \"kubernetes.io/projected/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-kube-api-access-cqpfr\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.482442 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.483128 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.488580 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.494097 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-config-data\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.495741 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-logs\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.500421 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.500455 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0a0f782b6c9e85c3ca02611e1c19370b05b7adf40a84b1c1fba977879932e0cc/globalmount\"" pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.507825 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.514701 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-scripts\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.525079 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqpfr\" (UniqueName: \"kubernetes.io/projected/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-kube-api-access-cqpfr\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.558914 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") " pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.724085 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:34:31 crc kubenswrapper[4753]: I1205 17:34:31.739430 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab469bdf-ba51-42a6-9f6b-ce772e850050" path="/var/lib/kubelet/pods/ab469bdf-ba51-42a6-9f6b-ce772e850050/volumes" Dec 05 17:34:32 crc kubenswrapper[4753]: I1205 17:34:32.230112 4753 generic.go:334] "Generic (PLEG): container finished" podID="70451c95-c6a0-474c-8707-3cb683987428" containerID="bd77eba2adb2e511c59a5571dd3458348425bcd9eafd7164e32a3404b65229bf" exitCode=143 Dec 05 17:34:32 crc kubenswrapper[4753]: I1205 17:34:32.230232 4753 generic.go:334] "Generic (PLEG): container finished" podID="70451c95-c6a0-474c-8707-3cb683987428" containerID="ec434f44447fcb1eb98dc000dff5ec92c6bab5e2f8fb624c8690344db0754130" exitCode=143 Dec 05 17:34:32 crc kubenswrapper[4753]: I1205 17:34:32.230279 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70451c95-c6a0-474c-8707-3cb683987428","Type":"ContainerDied","Data":"bd77eba2adb2e511c59a5571dd3458348425bcd9eafd7164e32a3404b65229bf"} Dec 05 17:34:32 crc kubenswrapper[4753]: I1205 17:34:32.230316 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70451c95-c6a0-474c-8707-3cb683987428","Type":"ContainerDied","Data":"ec434f44447fcb1eb98dc000dff5ec92c6bab5e2f8fb624c8690344db0754130"} Dec 05 17:34:32 crc kubenswrapper[4753]: I1205 17:34:32.231627 4753 generic.go:334] "Generic (PLEG): container finished" podID="5d74be7c-896f-4771-8d81-293a02d24cd5" containerID="62ed0226dbfaa3d50e7bb491688c2ee289e0378607a353b658f6c874aced0309" exitCode=0 Dec 05 17:34:32 crc kubenswrapper[4753]: I1205 17:34:32.231654 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-klc67" event={"ID":"5d74be7c-896f-4771-8d81-293a02d24cd5","Type":"ContainerDied","Data":"62ed0226dbfaa3d50e7bb491688c2ee289e0378607a353b658f6c874aced0309"} Dec 05 17:34:32 crc kubenswrapper[4753]: E1205 17:34:32.546816 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0acfa75f_8372_4ccb_a91a_6741854cdd34.slice\": RecentStats: unable to find data in memory cache]" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.072424 4753 scope.go:117] "RemoveContainer" containerID="03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.192644 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-d47nm" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.217850 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-scripts\") pod \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.217957 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-logs\") pod \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.218021 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-combined-ca-bundle\") pod \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.218047 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvmdq\" (UniqueName: \"kubernetes.io/projected/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-kube-api-access-kvmdq\") pod \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.218125 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-config-data\") pod \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\" (UID: \"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4\") " Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.218970 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-logs" (OuterVolumeSpecName: "logs") pod "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" (UID: "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.229287 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-scripts" (OuterVolumeSpecName: "scripts") pod "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" (UID: "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.241175 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-kube-api-access-kvmdq" (OuterVolumeSpecName: "kube-api-access-kvmdq") pod "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" (UID: "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4"). InnerVolumeSpecName "kube-api-access-kvmdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.270200 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" (UID: "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.277039 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-config-data" (OuterVolumeSpecName: "config-data") pod "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" (UID: "c6486f24-9b1d-40a4-bfff-7126ab5b1ba4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.288369 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-d47nm" event={"ID":"c6486f24-9b1d-40a4-bfff-7126ab5b1ba4","Type":"ContainerDied","Data":"c3a40add4428256bb981cae972b2f8d931e760e5565dfcaa08a3b126d52afd48"} Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.288410 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c3a40add4428256bb981cae972b2f8d931e760e5565dfcaa08a3b126d52afd48" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.288410 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-d47nm" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.320689 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.320729 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvmdq\" (UniqueName: \"kubernetes.io/projected/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-kube-api-access-kvmdq\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.320740 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.320750 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:33 crc kubenswrapper[4753]: I1205 17:34:33.320760 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.311723 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-56466dc556-cvwd4"] Dec 05 17:34:34 crc kubenswrapper[4753]: E1205 17:34:34.312530 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" containerName="placement-db-sync" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.312554 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" containerName="placement-db-sync" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.312858 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" containerName="placement-db-sync" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.317606 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.320318 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.320384 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.323410 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.323418 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-fvnlc" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.323465 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.326230 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-56466dc556-cvwd4"] Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.341915 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttjvc\" (UniqueName: \"kubernetes.io/projected/2b9f9c99-c7ba-4689-8218-f61fecf29867-kube-api-access-ttjvc\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.342044 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-scripts\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.342075 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b9f9c99-c7ba-4689-8218-f61fecf29867-logs\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.342095 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-public-tls-certs\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.342123 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-combined-ca-bundle\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.342165 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-internal-tls-certs\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.342186 
4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-config-data\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.444219 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-scripts\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.445679 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b9f9c99-c7ba-4689-8218-f61fecf29867-logs\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.445830 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-public-tls-certs\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.445966 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-combined-ca-bundle\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.446093 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-internal-tls-certs\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.446235 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-config-data\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.446471 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttjvc\" (UniqueName: \"kubernetes.io/projected/2b9f9c99-c7ba-4689-8218-f61fecf29867-kube-api-access-ttjvc\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.446597 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b9f9c99-c7ba-4689-8218-f61fecf29867-logs\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.457116 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-internal-tls-certs\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.457641 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-scripts\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.495457 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-combined-ca-bundle\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.496191 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-config-data\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.496976 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2b9f9c99-c7ba-4689-8218-f61fecf29867-public-tls-certs\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.502694 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttjvc\" (UniqueName: \"kubernetes.io/projected/2b9f9c99-c7ba-4689-8218-f61fecf29867-kube-api-access-ttjvc\") pod \"placement-56466dc556-cvwd4\" (UID: \"2b9f9c99-c7ba-4689-8218-f61fecf29867\") " pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.662755 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:34:34 crc kubenswrapper[4753]: I1205 17:34:34.729717 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:34:34 crc kubenswrapper[4753]: E1205 17:34:34.729951 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.240428 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.302954 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-p8h8t"] Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.303223 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" podUID="914bdf90-d9e9-4a6c-8c20-2512642669e1" containerName="dnsmasq-dns" containerID="cri-o://8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08" gracePeriod=10 Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.953243 4753 scope.go:117] "RemoveContainer" containerID="1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38" Dec 05 17:34:36 crc kubenswrapper[4753]: E1205 17:34:36.962448 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38\": container with ID starting with 1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38 not found: ID does not exist" containerID="1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38" Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.962488 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38"} err="failed to get container status \"1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38\": rpc error: code = NotFound desc = could not find container \"1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38\": container with ID starting with 1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38 not found: ID does not exist" Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.962513 4753 scope.go:117] "RemoveContainer" containerID="03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf" Dec 05 17:34:36 crc kubenswrapper[4753]: E1205 17:34:36.963401 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf\": container with ID starting with 03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf not found: ID does not exist" containerID="03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf" Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.963451 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf"} 
err="failed to get container status \"03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf\": rpc error: code = NotFound desc = could not find container \"03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf\": container with ID starting with 03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf not found: ID does not exist" Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.963479 4753 scope.go:117] "RemoveContainer" containerID="1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38" Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.963878 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38"} err="failed to get container status \"1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38\": rpc error: code = NotFound desc = could not find container \"1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38\": container with ID starting with 1df7342aa1587e36dbf632c5369b179fe7f4e7a22c024804d46876a5f5ffba38 not found: ID does not exist" Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.963897 4753 scope.go:117] "RemoveContainer" containerID="03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf" Dec 05 17:34:36 crc kubenswrapper[4753]: I1205 17:34:36.966221 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf"} err="failed to get container status \"03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf\": rpc error: code = NotFound desc = could not find container \"03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf\": container with ID starting with 03fdac034900480e6278f063cf472d50acaf05a1e6be9e85c7e861a5647f82cf not found: ID does not exist" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.076393 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.082132 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-klc67" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.116680 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbfn9\" (UniqueName: \"kubernetes.io/projected/70451c95-c6a0-474c-8707-3cb683987428-kube-api-access-wbfn9\") pod \"70451c95-c6a0-474c-8707-3cb683987428\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.116830 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-scripts\") pod \"70451c95-c6a0-474c-8707-3cb683987428\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.116849 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-combined-ca-bundle\") pod \"70451c95-c6a0-474c-8707-3cb683987428\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.116888 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-scripts\") pod \"5d74be7c-896f-4771-8d81-293a02d24cd5\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.116918 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-fernet-keys\") pod \"5d74be7c-896f-4771-8d81-293a02d24cd5\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.116968 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-config-data\") pod \"5d74be7c-896f-4771-8d81-293a02d24cd5\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.116988 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-config-data\") pod \"70451c95-c6a0-474c-8707-3cb683987428\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.117025 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-credential-keys\") pod \"5d74be7c-896f-4771-8d81-293a02d24cd5\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") " Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.117143 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"70451c95-c6a0-474c-8707-3cb683987428\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.117179 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-logs\") pod \"70451c95-c6a0-474c-8707-3cb683987428\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") " Dec 
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.117199 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-httpd-run\") pod \"70451c95-c6a0-474c-8707-3cb683987428\" (UID: \"70451c95-c6a0-474c-8707-3cb683987428\") "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.117223 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5xr5\" (UniqueName: \"kubernetes.io/projected/5d74be7c-896f-4771-8d81-293a02d24cd5-kube-api-access-s5xr5\") pod \"5d74be7c-896f-4771-8d81-293a02d24cd5\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.117243 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-combined-ca-bundle\") pod \"5d74be7c-896f-4771-8d81-293a02d24cd5\" (UID: \"5d74be7c-896f-4771-8d81-293a02d24cd5\") "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.120248 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "70451c95-c6a0-474c-8707-3cb683987428" (UID: "70451c95-c6a0-474c-8707-3cb683987428"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.120586 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-logs" (OuterVolumeSpecName: "logs") pod "70451c95-c6a0-474c-8707-3cb683987428" (UID: "70451c95-c6a0-474c-8707-3cb683987428"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.129242 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-scripts" (OuterVolumeSpecName: "scripts") pod "5d74be7c-896f-4771-8d81-293a02d24cd5" (UID: "5d74be7c-896f-4771-8d81-293a02d24cd5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.134079 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5d74be7c-896f-4771-8d81-293a02d24cd5" (UID: "5d74be7c-896f-4771-8d81-293a02d24cd5"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.135995 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-scripts" (OuterVolumeSpecName: "scripts") pod "70451c95-c6a0-474c-8707-3cb683987428" (UID: "70451c95-c6a0-474c-8707-3cb683987428"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.139503 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d74be7c-896f-4771-8d81-293a02d24cd5-kube-api-access-s5xr5" (OuterVolumeSpecName: "kube-api-access-s5xr5") pod "5d74be7c-896f-4771-8d81-293a02d24cd5" (UID: "5d74be7c-896f-4771-8d81-293a02d24cd5"). InnerVolumeSpecName "kube-api-access-s5xr5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.144972 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70451c95-c6a0-474c-8707-3cb683987428-kube-api-access-wbfn9" (OuterVolumeSpecName: "kube-api-access-wbfn9") pod "70451c95-c6a0-474c-8707-3cb683987428" (UID: "70451c95-c6a0-474c-8707-3cb683987428"). InnerVolumeSpecName "kube-api-access-wbfn9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.146867 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "5d74be7c-896f-4771-8d81-293a02d24cd5" (UID: "5d74be7c-896f-4771-8d81-293a02d24cd5"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.186296 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-config-data" (OuterVolumeSpecName: "config-data") pod "5d74be7c-896f-4771-8d81-293a02d24cd5" (UID: "5d74be7c-896f-4771-8d81-293a02d24cd5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.216659 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b" (OuterVolumeSpecName: "glance") pod "70451c95-c6a0-474c-8707-3cb683987428" (UID: "70451c95-c6a0-474c-8707-3cb683987428"). InnerVolumeSpecName "pvc-6f7bb1ce-2b18-4809-8632-596a297f364b". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220186 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbfn9\" (UniqueName: \"kubernetes.io/projected/70451c95-c6a0-474c-8707-3cb683987428-kube-api-access-wbfn9\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220212 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220221 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220230 4753 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220242 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220251 4753 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220283 4753 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") on node \"crc\" "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220296 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-logs\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220306 4753 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/70451c95-c6a0-474c-8707-3cb683987428-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.220314 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5xr5\" (UniqueName: \"kubernetes.io/projected/5d74be7c-896f-4771-8d81-293a02d24cd5-kube-api-access-s5xr5\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.289672 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.334615 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-swift-storage-0\") pod \"914bdf90-d9e9-4a6c-8c20-2512642669e1\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.334655 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-sb\") pod \"914bdf90-d9e9-4a6c-8c20-2512642669e1\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.334683 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pw7zv\" (UniqueName: \"kubernetes.io/projected/914bdf90-d9e9-4a6c-8c20-2512642669e1-kube-api-access-pw7zv\") pod \"914bdf90-d9e9-4a6c-8c20-2512642669e1\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.334761 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-nb\") pod \"914bdf90-d9e9-4a6c-8c20-2512642669e1\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.334887 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-config\") pod \"914bdf90-d9e9-4a6c-8c20-2512642669e1\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.334937 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-svc\") pod \"914bdf90-d9e9-4a6c-8c20-2512642669e1\" (UID: \"914bdf90-d9e9-4a6c-8c20-2512642669e1\") "
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.372955 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/914bdf90-d9e9-4a6c-8c20-2512642669e1-kube-api-access-pw7zv" (OuterVolumeSpecName: "kube-api-access-pw7zv") pod "914bdf90-d9e9-4a6c-8c20-2512642669e1" (UID: "914bdf90-d9e9-4a6c-8c20-2512642669e1"). InnerVolumeSpecName "kube-api-access-pw7zv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.395910 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-klc67" event={"ID":"5d74be7c-896f-4771-8d81-293a02d24cd5","Type":"ContainerDied","Data":"1eafc4f5615c5942ef5544b5628955f14da54dfc0bf8fc073455283ceec5afcf"}
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.395963 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1eafc4f5615c5942ef5544b5628955f14da54dfc0bf8fc073455283ceec5afcf"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.396031 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-klc67"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.398343 4753 generic.go:334] "Generic (PLEG): container finished" podID="914bdf90-d9e9-4a6c-8c20-2512642669e1" containerID="8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08" exitCode=0
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.398695 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" event={"ID":"914bdf90-d9e9-4a6c-8c20-2512642669e1","Type":"ContainerDied","Data":"8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08"}
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.398719 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t" event={"ID":"914bdf90-d9e9-4a6c-8c20-2512642669e1","Type":"ContainerDied","Data":"e1ff7227b5d0fe19466c00a3c27da19f7e2afaadb71baf44f70b3855cea4c1fe"}
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.398738 4753 scope.go:117] "RemoveContainer" containerID="8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.398833 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fcfdd6f9f-p8h8t"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.443849 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pw7zv\" (UniqueName: \"kubernetes.io/projected/914bdf90-d9e9-4a6c-8c20-2512642669e1-kube-api-access-pw7zv\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.472446 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"70451c95-c6a0-474c-8707-3cb683987428","Type":"ContainerDied","Data":"d12e9bb1a0a8e43894a8abe1e1e742ef508f672d39826822e2b8832ab2969779"}
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.472533 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.489347 4753 scope.go:117] "RemoveContainer" containerID="d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.528711 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-config-data" (OuterVolumeSpecName: "config-data") pod "70451c95-c6a0-474c-8707-3cb683987428" (UID: "70451c95-c6a0-474c-8707-3cb683987428"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.539357 4753 scope.go:117] "RemoveContainer" containerID="8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08"
Dec 05 17:34:37 crc kubenswrapper[4753]: E1205 17:34:37.541129 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08\": container with ID starting with 8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08 not found: ID does not exist" containerID="8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.541178 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08"} err="failed to get container status \"8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08\": rpc error: code = NotFound desc = could not find container \"8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08\": container with ID starting with 8fb4d1147ec040d5bba0a4a0bf4016823a8ff1debbe146223f8e94acb9d27c08 not found: ID does not exist"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.541202 4753 scope.go:117] "RemoveContainer" containerID="d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.545326 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:34:37 crc kubenswrapper[4753]: E1205 17:34:37.547293 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0\": container with ID starting with d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0 not found: ID does not exist" containerID="d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.547319 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0"} err="failed to get container status \"d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0\": rpc error: code = NotFound desc = could not find container \"d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0\": container with ID starting with d4a8db04234feee1e6e730199c05a74aec436f6d81df70652a2891f8f3566af0 not found: ID does not exist"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.547335 4753 scope.go:117] "RemoveContainer" containerID="bd77eba2adb2e511c59a5571dd3458348425bcd9eafd7164e32a3404b65229bf"
Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.580817 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70451c95-c6a0-474c-8707-3cb683987428" (UID: "70451c95-c6a0-474c-8707-3cb683987428"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.603427 4753 scope.go:117] "RemoveContainer" containerID="ec434f44447fcb1eb98dc000dff5ec92c6bab5e2f8fb624c8690344db0754130" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.634580 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "914bdf90-d9e9-4a6c-8c20-2512642669e1" (UID: "914bdf90-d9e9-4a6c-8c20-2512642669e1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.637252 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:34:37 crc kubenswrapper[4753]: W1205 17:34:37.645296 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e5b52fd_87d5_4912_a8b2_305d4271a5ba.slice/crio-db354364c39b9761b24def3ce625210c8f748702954e88f27a292043415b9c92 WatchSource:0}: Error finding container db354364c39b9761b24def3ce625210c8f748702954e88f27a292043415b9c92: Status 404 returned error can't find the container with id db354364c39b9761b24def3ce625210c8f748702954e88f27a292043415b9c92 Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.646808 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.646839 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70451c95-c6a0-474c-8707-3cb683987428-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.659605 4753 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.659756 4753 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-6f7bb1ce-2b18-4809-8632-596a297f364b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b") on node "crc" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.715859 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5d74be7c-896f-4771-8d81-293a02d24cd5" (UID: "5d74be7c-896f-4771-8d81-293a02d24cd5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.750176 4753 reconciler_common.go:293] "Volume detached for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.750732 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d74be7c-896f-4771-8d81-293a02d24cd5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.775777 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "914bdf90-d9e9-4a6c-8c20-2512642669e1" (UID: "914bdf90-d9e9-4a6c-8c20-2512642669e1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.777689 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "914bdf90-d9e9-4a6c-8c20-2512642669e1" (UID: "914bdf90-d9e9-4a6c-8c20-2512642669e1"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.790207 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-56466dc556-cvwd4"] Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.856137 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.856207 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.877270 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "914bdf90-d9e9-4a6c-8c20-2512642669e1" (UID: "914bdf90-d9e9-4a6c-8c20-2512642669e1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.884956 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-config" (OuterVolumeSpecName: "config") pod "914bdf90-d9e9-4a6c-8c20-2512642669e1" (UID: "914bdf90-d9e9-4a6c-8c20-2512642669e1"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.958734 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:37 crc kubenswrapper[4753]: I1205 17:34:37.958770 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/914bdf90-d9e9-4a6c-8c20-2512642669e1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.143514 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.166949 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.179894 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:34:38 crc kubenswrapper[4753]: E1205 17:34:38.180440 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d74be7c-896f-4771-8d81-293a02d24cd5" containerName="keystone-bootstrap" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.180459 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d74be7c-896f-4771-8d81-293a02d24cd5" containerName="keystone-bootstrap" Dec 05 17:34:38 crc kubenswrapper[4753]: E1205 17:34:38.180498 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70451c95-c6a0-474c-8707-3cb683987428" containerName="glance-httpd" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.180504 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="70451c95-c6a0-474c-8707-3cb683987428" containerName="glance-httpd" Dec 05 17:34:38 crc kubenswrapper[4753]: E1205 17:34:38.180520 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70451c95-c6a0-474c-8707-3cb683987428" containerName="glance-log" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.180526 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="70451c95-c6a0-474c-8707-3cb683987428" containerName="glance-log" Dec 05 17:34:38 crc kubenswrapper[4753]: E1205 17:34:38.180536 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="914bdf90-d9e9-4a6c-8c20-2512642669e1" containerName="init" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.180541 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="914bdf90-d9e9-4a6c-8c20-2512642669e1" containerName="init" Dec 05 17:34:38 crc kubenswrapper[4753]: E1205 17:34:38.180562 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="914bdf90-d9e9-4a6c-8c20-2512642669e1" containerName="dnsmasq-dns" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.180568 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="914bdf90-d9e9-4a6c-8c20-2512642669e1" containerName="dnsmasq-dns" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.180743 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="914bdf90-d9e9-4a6c-8c20-2512642669e1" containerName="dnsmasq-dns" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.180757 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d74be7c-896f-4771-8d81-293a02d24cd5" containerName="keystone-bootstrap" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.180772 4753 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="70451c95-c6a0-474c-8707-3cb683987428" containerName="glance-httpd" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.180786 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="70451c95-c6a0-474c-8707-3cb683987428" containerName="glance-log" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.181899 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.188102 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.188329 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.192483 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-p8h8t"] Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.200270 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fcfdd6f9f-p8h8t"] Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.211641 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.258633 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-578b67ddb8-fsb8m"] Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.278359 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.280061 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-578b67ddb8-fsb8m"] Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.282049 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.282776 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.282897 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.283006 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-jfr42" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.283106 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.283289 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.368415 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.368768 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxh7m\" (UniqueName: \"kubernetes.io/projected/870a2370-eaf3-4682-9bf9-712d62c24e28-kube-api-access-nxh7m\") pod \"glance-default-internal-api-0\" (UID: 
\"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.368888 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.368921 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-config-data\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.368965 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.368988 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-logs\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.369014 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.369036 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-scripts\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471142 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-config-data\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471217 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471248 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-combined-ca-bundle\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471273 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxh7m\" (UniqueName: \"kubernetes.io/projected/870a2370-eaf3-4682-9bf9-712d62c24e28-kube-api-access-nxh7m\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471367 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471389 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-scripts\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471410 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-config-data\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471436 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztjh6\" (UniqueName: \"kubernetes.io/projected/4e39f06b-3be0-4d99-a8b8-627de083ff81-kube-api-access-ztjh6\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471461 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-fernet-keys\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471479 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471494 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-logs\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471515 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-internal-tls-certs\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471533 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471556 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-scripts\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471579 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-credential-keys\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.471602 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-public-tls-certs\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.472749 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-logs\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.473481 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.479094 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.481094 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.482665 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-config-data\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.483861 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.483894 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/df7f219cf3246d8ce07ad766614ffac20d7c8f72baddde817ee73d8a655238aa/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.484389 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-scripts\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.494361 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cffca222-0336-40c8-886f-247667322702","Type":"ContainerStarted","Data":"b0490a0fecfb0d034ea03e1168b2d4cfd167f436e290a0ebc77767227007f70f"} Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.497385 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxh7m\" (UniqueName: \"kubernetes.io/projected/870a2370-eaf3-4682-9bf9-712d62c24e28-kube-api-access-nxh7m\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.501677 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-l429g" event={"ID":"562378ad-0bd6-4143-bc1e-331ec844e38f","Type":"ContainerStarted","Data":"db9b02981461df0ef06b5460767b1cc32682af21a68b9885dc337ec2d7def6b0"} Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.515559 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56466dc556-cvwd4" event={"ID":"2b9f9c99-c7ba-4689-8218-f61fecf29867","Type":"ContainerStarted","Data":"e658654a2408bffc38a046fe3c098f56037cb7323a54c3009e909cda8bbb69c4"} Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.515598 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56466dc556-cvwd4" event={"ID":"2b9f9c99-c7ba-4689-8218-f61fecf29867","Type":"ContainerStarted","Data":"ad930e7be2d8cbe0d036955c727e9847789b08d5c31e5c57f3976833ea6a0a85"} Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.524594 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-l429g" podStartSLOduration=3.981421194 podStartE2EDuration="51.524576414s" podCreationTimestamp="2025-12-05 17:33:47 +0000 UTC" firstStartedPulling="2025-12-05 17:33:49.475335037 +0000 UTC m=+1767.978442043" lastFinishedPulling="2025-12-05 17:34:37.018490257 +0000 UTC m=+1815.521597263" observedRunningTime="2025-12-05 17:34:38.521893308 +0000 UTC m=+1817.025000314" 
watchObservedRunningTime="2025-12-05 17:34:38.524576414 +0000 UTC m=+1817.027683420" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.526103 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e5b52fd-87d5-4912-a8b2-305d4271a5ba","Type":"ContainerStarted","Data":"db354364c39b9761b24def3ce625210c8f748702954e88f27a292043415b9c92"} Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.572032 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.575401 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-scripts\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.575501 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztjh6\" (UniqueName: \"kubernetes.io/projected/4e39f06b-3be0-4d99-a8b8-627de083ff81-kube-api-access-ztjh6\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.575560 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-fernet-keys\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.575628 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-internal-tls-certs\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.575697 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-credential-keys\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.575764 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-public-tls-certs\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.575833 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-config-data\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.575902 
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.587411 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-scripts\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.587474 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-fernet-keys\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.590027 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-internal-tls-certs\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.595690 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-credential-keys\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.596222 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-public-tls-certs\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.597444 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-combined-ca-bundle\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.603679 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4e39f06b-3be0-4d99-a8b8-627de083ff81-config-data\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.605570 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztjh6\" (UniqueName: \"kubernetes.io/projected/4e39f06b-3be0-4d99-a8b8-627de083ff81-kube-api-access-ztjh6\") pod \"keystone-578b67ddb8-fsb8m\" (UID: \"4e39f06b-3be0-4d99-a8b8-627de083ff81\") " pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.640105 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:38 crc kubenswrapper[4753]: I1205 17:34:38.835012 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.243598 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-578b67ddb8-fsb8m"]
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.571346 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-578b67ddb8-fsb8m" event={"ID":"4e39f06b-3be0-4d99-a8b8-627de083ff81","Type":"ContainerStarted","Data":"3e43555c3ea5872d7851ef8edf518ea6faefb45529d2fd5b37f811acf9534eb1"}
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.581970 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-56466dc556-cvwd4" event={"ID":"2b9f9c99-c7ba-4689-8218-f61fecf29867","Type":"ContainerStarted","Data":"9abf3785b1ff168afdd90a9e38e7c59db385b44b704e96d1d221108f5704e8da"}
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.582160 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-56466dc556-cvwd4"
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.582177 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-56466dc556-cvwd4"
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.594976 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e5b52fd-87d5-4912-a8b2-305d4271a5ba","Type":"ContainerStarted","Data":"415e3d210ea73c9242e858919e7b9133f06ea6f24a8e1aa7e1aaa51f035458fd"}
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.606518 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-f6c6z" event={"ID":"68a4a494-d4ff-43ee-a74c-4f0377d229ec","Type":"ContainerStarted","Data":"a05c9724617522c7f719a6bb68143d24b7811e88120000f953246afd4de857bc"}
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.649939 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-56466dc556-cvwd4" podStartSLOduration=5.649918882 podStartE2EDuration="5.649918882s" podCreationTimestamp="2025-12-05 17:34:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:39.632163538 +0000 UTC m=+1818.135270544" watchObservedRunningTime="2025-12-05 17:34:39.649918882 +0000 UTC m=+1818.153025878"
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.676671 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-f6c6z" podStartSLOduration=4.440856544 podStartE2EDuration="52.676647651s" podCreationTimestamp="2025-12-05 17:33:47 +0000 UTC" firstStartedPulling="2025-12-05 17:33:48.814604376 +0000 UTC m=+1767.317711382" lastFinishedPulling="2025-12-05 17:34:37.050395483 +0000 UTC m=+1815.553502489" observedRunningTime="2025-12-05 17:34:39.658705411 +0000 UTC m=+1818.161812427" watchObservedRunningTime="2025-12-05 17:34:39.676647651 +0000 UTC m=+1818.179754657"
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.825779 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70451c95-c6a0-474c-8707-3cb683987428" path="/var/lib/kubelet/pods/70451c95-c6a0-474c-8707-3cb683987428/volumes"
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.827088 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="914bdf90-d9e9-4a6c-8c20-2512642669e1" path="/var/lib/kubelet/pods/914bdf90-d9e9-4a6c-8c20-2512642669e1/volumes"
Dec 05 17:34:39 crc kubenswrapper[4753]: I1205 17:34:39.943214 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 17:34:39 crc kubenswrapper[4753]: W1205 17:34:39.986384 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod870a2370_eaf3_4682_9bf9_712d62c24e28.slice/crio-cbe7223ae8193ebc0e0ad1431c54cb3a41573407b1e7f2b3bd8a761d91e72011 WatchSource:0}: Error finding container cbe7223ae8193ebc0e0ad1431c54cb3a41573407b1e7f2b3bd8a761d91e72011: Status 404 returned error can't find the container with id cbe7223ae8193ebc0e0ad1431c54cb3a41573407b1e7f2b3bd8a761d91e72011
Dec 05 17:34:40 crc kubenswrapper[4753]: I1205 17:34:40.618320 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-578b67ddb8-fsb8m" event={"ID":"4e39f06b-3be0-4d99-a8b8-627de083ff81","Type":"ContainerStarted","Data":"e6cf2deec5429c9077ae967d5e3290fb22f31eaab647b6d1809212c5e20b1272"}
Dec 05 17:34:40 crc kubenswrapper[4753]: I1205 17:34:40.619923 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-578b67ddb8-fsb8m"
Dec 05 17:34:40 crc kubenswrapper[4753]: I1205 17:34:40.621404 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"870a2370-eaf3-4682-9bf9-712d62c24e28","Type":"ContainerStarted","Data":"cbe7223ae8193ebc0e0ad1431c54cb3a41573407b1e7f2b3bd8a761d91e72011"}
Dec 05 17:34:40 crc kubenswrapper[4753]: I1205 17:34:40.623908 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e5b52fd-87d5-4912-a8b2-305d4271a5ba","Type":"ContainerStarted","Data":"31ff5943ec907ee59d90022c023858050de26c11bb52b8a711ff3910dd69ad61"}
Dec 05 17:34:40 crc kubenswrapper[4753]: I1205 17:34:40.628913 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-9vsjw" event={"ID":"f5711686-6b64-450f-b2b4-6583dab08275","Type":"ContainerStarted","Data":"56de0dcec4169c18c85433352029914ff2efd1c1bb8af2c7a6d4a87f80f63ae2"}
Dec 05 17:34:40 crc kubenswrapper[4753]: I1205 17:34:40.653337 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-578b67ddb8-fsb8m" podStartSLOduration=2.653313127 podStartE2EDuration="2.653313127s" podCreationTimestamp="2025-12-05 17:34:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:40.643781316 +0000 UTC m=+1819.146888322" watchObservedRunningTime="2025-12-05 17:34:40.653313127 +0000 UTC m=+1819.156420133"
Dec 05 17:34:40 crc kubenswrapper[4753]: I1205 17:34:40.671510 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-9vsjw" podStartSLOduration=3.5566173599999997 podStartE2EDuration="53.671492203s" podCreationTimestamp="2025-12-05 17:33:47 +0000 UTC" firstStartedPulling="2025-12-05 17:33:49.376136108 +0000 UTC m=+1767.879243114" lastFinishedPulling="2025-12-05 17:34:39.491010951 +0000 UTC m=+1817.994117957" observedRunningTime="2025-12-05 17:34:40.65907501 +0000 UTC m=+1819.162182026" watchObservedRunningTime="2025-12-05 17:34:40.671492203 +0000 UTC m=+1819.174599209"
Dec 05 17:34:40 crc kubenswrapper[4753]: I1205 17:34:40.708734 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.708713239 podStartE2EDuration="9.708713239s" podCreationTimestamp="2025-12-05 17:34:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:40.688174776 +0000 UTC m=+1819.191281782" watchObservedRunningTime="2025-12-05 17:34:40.708713239 +0000 UTC m=+1819.211820245"
Dec 05 17:34:41 crc kubenswrapper[4753]: I1205 17:34:41.642955 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"870a2370-eaf3-4682-9bf9-712d62c24e28","Type":"ContainerStarted","Data":"1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547"}
Dec 05 17:34:41 crc kubenswrapper[4753]: I1205 17:34:41.741777 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 05 17:34:41 crc kubenswrapper[4753]: I1205 17:34:41.741816 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Dec 05 17:34:41 crc kubenswrapper[4753]: I1205 17:34:41.768042 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 05 17:34:41 crc kubenswrapper[4753]: I1205 17:34:41.793662 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Dec 05 17:34:42 crc kubenswrapper[4753]: I1205 17:34:42.652955 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 05 17:34:42 crc kubenswrapper[4753]: I1205 17:34:42.653346 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Dec 05 17:34:42 crc kubenswrapper[4753]: E1205 17:34:42.853656 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0acfa75f_8372_4ccb_a91a_6741854cdd34.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 17:34:44 crc kubenswrapper[4753]: I1205 17:34:44.673206 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"870a2370-eaf3-4682-9bf9-712d62c24e28","Type":"ContainerStarted","Data":"bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644"}
Dec 05 17:34:44 crc kubenswrapper[4753]: I1205 17:34:44.700201 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.700181704 podStartE2EDuration="6.700181704s" podCreationTimestamp="2025-12-05 17:34:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:44.694898284 +0000 UTC m=+1823.198005290" watchObservedRunningTime="2025-12-05 17:34:44.700181704 +0000 UTC m=+1823.203288720"
Dec 05 17:34:47 crc kubenswrapper[4753]: I1205 17:34:47.701987 4753 generic.go:334] "Generic (PLEG): container finished" podID="562378ad-0bd6-4143-bc1e-331ec844e38f" containerID="db9b02981461df0ef06b5460767b1cc32682af21a68b9885dc337ec2d7def6b0" exitCode=0
Dec 05 17:34:47 crc kubenswrapper[4753]: I1205 17:34:47.702058 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-l429g"
event={"ID":"562378ad-0bd6-4143-bc1e-331ec844e38f","Type":"ContainerDied","Data":"db9b02981461df0ef06b5460767b1cc32682af21a68b9885dc337ec2d7def6b0"} Dec 05 17:34:48 crc kubenswrapper[4753]: I1205 17:34:48.721424 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:34:48 crc kubenswrapper[4753]: E1205 17:34:48.721989 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:34:48 crc kubenswrapper[4753]: I1205 17:34:48.827210 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 17:34:48 crc kubenswrapper[4753]: I1205 17:34:48.836784 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 17:34:48 crc kubenswrapper[4753]: I1205 17:34:48.837965 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:48 crc kubenswrapper[4753]: I1205 17:34:48.838006 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:48 crc kubenswrapper[4753]: I1205 17:34:48.883716 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:48 crc kubenswrapper[4753]: I1205 17:34:48.930316 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.487771 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-l429g" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.557331 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-db-sync-config-data\") pod \"562378ad-0bd6-4143-bc1e-331ec844e38f\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.557387 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-combined-ca-bundle\") pod \"562378ad-0bd6-4143-bc1e-331ec844e38f\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.557434 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zc7jr\" (UniqueName: \"kubernetes.io/projected/562378ad-0bd6-4143-bc1e-331ec844e38f-kube-api-access-zc7jr\") pod \"562378ad-0bd6-4143-bc1e-331ec844e38f\" (UID: \"562378ad-0bd6-4143-bc1e-331ec844e38f\") " Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.560737 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "562378ad-0bd6-4143-bc1e-331ec844e38f" (UID: "562378ad-0bd6-4143-bc1e-331ec844e38f"). 
InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.561082 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/562378ad-0bd6-4143-bc1e-331ec844e38f-kube-api-access-zc7jr" (OuterVolumeSpecName: "kube-api-access-zc7jr") pod "562378ad-0bd6-4143-bc1e-331ec844e38f" (UID: "562378ad-0bd6-4143-bc1e-331ec844e38f"). InnerVolumeSpecName "kube-api-access-zc7jr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.600636 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "562378ad-0bd6-4143-bc1e-331ec844e38f" (UID: "562378ad-0bd6-4143-bc1e-331ec844e38f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.658971 4753 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.659989 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/562378ad-0bd6-4143-bc1e-331ec844e38f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.660005 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zc7jr\" (UniqueName: \"kubernetes.io/projected/562378ad-0bd6-4143-bc1e-331ec844e38f-kube-api-access-zc7jr\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:49 crc kubenswrapper[4753]: E1205 17:34:49.708029 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="cffca222-0336-40c8-886f-247667322702" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.723689 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-l429g" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.727565 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cffca222-0336-40c8-886f-247667322702" containerName="ceilometer-notification-agent" containerID="cri-o://c5c0a4fafd3b40829436466d86fa7132ec845878079668fc5f2fb5a78a8c4968" gracePeriod=30 Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.728204 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cffca222-0336-40c8-886f-247667322702" containerName="proxy-httpd" containerID="cri-o://baf68e5c6a3f229094007acbd46cad9362cc7aeb4f9e7f97faacec3542aaf326" gracePeriod=30 Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.728274 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cffca222-0336-40c8-886f-247667322702" containerName="sg-core" containerID="cri-o://b0490a0fecfb0d034ea03e1168b2d4cfd167f436e290a0ebc77767227007f70f" gracePeriod=30 Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.742663 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-l429g" event={"ID":"562378ad-0bd6-4143-bc1e-331ec844e38f","Type":"ContainerDied","Data":"2d7660b8d6b99d54905ec57f42a15d4279514535700237aff37e6d80ca2717c8"} Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.742706 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d7660b8d6b99d54905ec57f42a15d4279514535700237aff37e6d80ca2717c8" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.742722 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cffca222-0336-40c8-886f-247667322702","Type":"ContainerStarted","Data":"baf68e5c6a3f229094007acbd46cad9362cc7aeb4f9e7f97faacec3542aaf326"} Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.742742 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.742760 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:49 crc kubenswrapper[4753]: I1205 17:34:49.742775 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.070070 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-69b574cd87-g8xmw"] Dec 05 17:34:50 crc kubenswrapper[4753]: E1205 17:34:50.070595 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="562378ad-0bd6-4143-bc1e-331ec844e38f" containerName="barbican-db-sync" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.070624 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="562378ad-0bd6-4143-bc1e-331ec844e38f" containerName="barbican-db-sync" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.070915 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="562378ad-0bd6-4143-bc1e-331ec844e38f" containerName="barbican-db-sync" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.072339 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.077852 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.078079 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.078252 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-v5mm4" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.087140 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-58b544895d-h2wcr"] Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.104991 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.112635 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.130914 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58b544895d-h2wcr"] Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.151925 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-69b574cd87-g8xmw"] Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.175990 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/491b4a49-e02d-41a2-b783-b3dddbedbc57-logs\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.176338 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/491b4a49-e02d-41a2-b783-b3dddbedbc57-combined-ca-bundle\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.176396 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/491b4a49-e02d-41a2-b783-b3dddbedbc57-config-data\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.176453 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/491b4a49-e02d-41a2-b783-b3dddbedbc57-config-data-custom\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.176532 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpkgz\" (UniqueName: \"kubernetes.io/projected/491b4a49-e02d-41a2-b783-b3dddbedbc57-kube-api-access-cpkgz\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " 
pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.212888 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-gnhfq"] Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.214594 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.249975 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-gnhfq"] Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.278907 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/491b4a49-e02d-41a2-b783-b3dddbedbc57-config-data-custom\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.278962 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c272889e-62f7-4ce2-8a38-e15945d984d9-config-data\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.279041 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c272889e-62f7-4ce2-8a38-e15945d984d9-combined-ca-bundle\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.279090 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpkgz\" (UniqueName: \"kubernetes.io/projected/491b4a49-e02d-41a2-b783-b3dddbedbc57-kube-api-access-cpkgz\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.279124 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vv5g\" (UniqueName: \"kubernetes.io/projected/c272889e-62f7-4ce2-8a38-e15945d984d9-kube-api-access-6vv5g\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.279203 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/491b4a49-e02d-41a2-b783-b3dddbedbc57-logs\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.279246 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/491b4a49-e02d-41a2-b783-b3dddbedbc57-combined-ca-bundle\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.279292 4753 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c272889e-62f7-4ce2-8a38-e15945d984d9-logs\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.279334 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c272889e-62f7-4ce2-8a38-e15945d984d9-config-data-custom\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.279403 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/491b4a49-e02d-41a2-b783-b3dddbedbc57-config-data\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.296920 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/491b4a49-e02d-41a2-b783-b3dddbedbc57-config-data-custom\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.301012 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cpkgz\" (UniqueName: \"kubernetes.io/projected/491b4a49-e02d-41a2-b783-b3dddbedbc57-kube-api-access-cpkgz\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.302056 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/491b4a49-e02d-41a2-b783-b3dddbedbc57-logs\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.306265 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/491b4a49-e02d-41a2-b783-b3dddbedbc57-combined-ca-bundle\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.311905 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/491b4a49-e02d-41a2-b783-b3dddbedbc57-config-data\") pod \"barbican-worker-69b574cd87-g8xmw\" (UID: \"491b4a49-e02d-41a2-b783-b3dddbedbc57\") " pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.367128 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-66c6899d8d-whfkr"] Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.369555 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.374085 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382415 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382487 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c272889e-62f7-4ce2-8a38-e15945d984d9-logs\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382517 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c272889e-62f7-4ce2-8a38-e15945d984d9-config-data-custom\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382536 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-config\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382569 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-svc\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382601 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382624 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382660 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c272889e-62f7-4ce2-8a38-e15945d984d9-config-data\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 
17:34:50.382707 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c272889e-62f7-4ce2-8a38-e15945d984d9-combined-ca-bundle\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382733 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ph574\" (UniqueName: \"kubernetes.io/projected/daeb7979-ef24-45e4-888f-a917ecbec75f-kube-api-access-ph574\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.382769 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vv5g\" (UniqueName: \"kubernetes.io/projected/c272889e-62f7-4ce2-8a38-e15945d984d9-kube-api-access-6vv5g\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.383382 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c272889e-62f7-4ce2-8a38-e15945d984d9-logs\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.385989 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-66c6899d8d-whfkr"] Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.396683 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c272889e-62f7-4ce2-8a38-e15945d984d9-combined-ca-bundle\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.403891 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c272889e-62f7-4ce2-8a38-e15945d984d9-config-data-custom\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.409977 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vv5g\" (UniqueName: \"kubernetes.io/projected/c272889e-62f7-4ce2-8a38-e15945d984d9-kube-api-access-6vv5g\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.410962 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c272889e-62f7-4ce2-8a38-e15945d984d9-config-data\") pod \"barbican-keystone-listener-58b544895d-h2wcr\" (UID: \"c272889e-62f7-4ce2-8a38-e15945d984d9\") " pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.443732 4753 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/barbican-worker-69b574cd87-g8xmw" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.476887 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484320 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data-custom\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484384 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484455 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-config\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484496 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-svc\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484540 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484571 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484618 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484647 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5tjwg\" (UniqueName: \"kubernetes.io/projected/6686b0a0-8673-4e80-9763-64bc2d17834b-kube-api-access-5tjwg\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484678 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6686b0a0-8673-4e80-9763-64bc2d17834b-logs\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484728 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-combined-ca-bundle\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.484758 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ph574\" (UniqueName: \"kubernetes.io/projected/daeb7979-ef24-45e4-888f-a917ecbec75f-kube-api-access-ph574\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.485918 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.486577 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.486951 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-svc\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.487481 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-config\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.488428 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.500852 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ph574\" (UniqueName: \"kubernetes.io/projected/daeb7979-ef24-45e4-888f-a917ecbec75f-kube-api-access-ph574\") pod \"dnsmasq-dns-688c87cc99-gnhfq\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.554627 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.606338 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data-custom\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.606454 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.606479 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5tjwg\" (UniqueName: \"kubernetes.io/projected/6686b0a0-8673-4e80-9763-64bc2d17834b-kube-api-access-5tjwg\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.606501 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6686b0a0-8673-4e80-9763-64bc2d17834b-logs\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.606537 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-combined-ca-bundle\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.614973 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6686b0a0-8673-4e80-9763-64bc2d17834b-logs\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.626156 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data-custom\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.642092 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-combined-ca-bundle\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.644430 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc 
kubenswrapper[4753]: I1205 17:34:50.663946 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5tjwg\" (UniqueName: \"kubernetes.io/projected/6686b0a0-8673-4e80-9763-64bc2d17834b-kube-api-access-5tjwg\") pod \"barbican-api-66c6899d8d-whfkr\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.765971 4753 generic.go:334] "Generic (PLEG): container finished" podID="cffca222-0336-40c8-886f-247667322702" containerID="b0490a0fecfb0d034ea03e1168b2d4cfd167f436e290a0ebc77767227007f70f" exitCode=2 Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.767009 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cffca222-0336-40c8-886f-247667322702","Type":"ContainerDied","Data":"b0490a0fecfb0d034ea03e1168b2d4cfd167f436e290a0ebc77767227007f70f"} Dec 05 17:34:50 crc kubenswrapper[4753]: I1205 17:34:50.781283 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.004572 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.096810 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-69b574cd87-g8xmw"] Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.204140 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-58b544895d-h2wcr"] Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.338713 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-gnhfq"] Dec 05 17:34:51 crc kubenswrapper[4753]: W1205 17:34:51.341094 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddaeb7979_ef24_45e4_888f_a917ecbec75f.slice/crio-6aae9b1bd7302abfe5975f2a289519d21f0d8bb714682e375d9f647033cadc05 WatchSource:0}: Error finding container 6aae9b1bd7302abfe5975f2a289519d21f0d8bb714682e375d9f647033cadc05: Status 404 returned error can't find the container with id 6aae9b1bd7302abfe5975f2a289519d21f0d8bb714682e375d9f647033cadc05 Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.416992 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-66c6899d8d-whfkr"] Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.781905 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" event={"ID":"daeb7979-ef24-45e4-888f-a917ecbec75f","Type":"ContainerStarted","Data":"6aae9b1bd7302abfe5975f2a289519d21f0d8bb714682e375d9f647033cadc05"} Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.786685 4753 generic.go:334] "Generic (PLEG): container finished" podID="68a4a494-d4ff-43ee-a74c-4f0377d229ec" containerID="a05c9724617522c7f719a6bb68143d24b7811e88120000f953246afd4de857bc" exitCode=0 Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.786801 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-f6c6z" event={"ID":"68a4a494-d4ff-43ee-a74c-4f0377d229ec","Type":"ContainerDied","Data":"a05c9724617522c7f719a6bb68143d24b7811e88120000f953246afd4de857bc"} Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.790552 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69b574cd87-g8xmw" 
event={"ID":"491b4a49-e02d-41a2-b783-b3dddbedbc57","Type":"ContainerStarted","Data":"f7b649d0834978e962fce1735744eb82f7e96883c780b4f15cc2fc4664af0b7f"} Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.794810 4753 generic.go:334] "Generic (PLEG): container finished" podID="cffca222-0336-40c8-886f-247667322702" containerID="c5c0a4fafd3b40829436466d86fa7132ec845878079668fc5f2fb5a78a8c4968" exitCode=0 Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.794892 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cffca222-0336-40c8-886f-247667322702","Type":"ContainerDied","Data":"c5c0a4fafd3b40829436466d86fa7132ec845878079668fc5f2fb5a78a8c4968"} Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.798749 4753 generic.go:334] "Generic (PLEG): container finished" podID="f5711686-6b64-450f-b2b4-6583dab08275" containerID="56de0dcec4169c18c85433352029914ff2efd1c1bb8af2c7a6d4a87f80f63ae2" exitCode=0 Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.799353 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-9vsjw" event={"ID":"f5711686-6b64-450f-b2b4-6583dab08275","Type":"ContainerDied","Data":"56de0dcec4169c18c85433352029914ff2efd1c1bb8af2c7a6d4a87f80f63ae2"} Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.810702 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" event={"ID":"c272889e-62f7-4ce2-8a38-e15945d984d9","Type":"ContainerStarted","Data":"45966ea2f50e4d934e75de87d52878bca71b6731cd0154499677d5d38c7a833a"} Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.818930 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66c6899d8d-whfkr" event={"ID":"6686b0a0-8673-4e80-9763-64bc2d17834b","Type":"ContainerStarted","Data":"8399da123101c0dc947d00005889895a4ab4ddd66c0d13f7afbcf9c65362849c"} Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.818972 4753 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:34:51 crc kubenswrapper[4753]: I1205 17:34:51.819012 4753 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:34:52 crc kubenswrapper[4753]: I1205 17:34:52.403115 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:52 crc kubenswrapper[4753]: I1205 17:34:52.651714 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 17:34:52 crc kubenswrapper[4753]: I1205 17:34:52.832894 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66c6899d8d-whfkr" event={"ID":"6686b0a0-8673-4e80-9763-64bc2d17834b","Type":"ContainerStarted","Data":"16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c"} Dec 05 17:34:52 crc kubenswrapper[4753]: I1205 17:34:52.832944 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66c6899d8d-whfkr" event={"ID":"6686b0a0-8673-4e80-9763-64bc2d17834b","Type":"ContainerStarted","Data":"d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d"} Dec 05 17:34:52 crc kubenswrapper[4753]: I1205 17:34:52.832984 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:34:52 crc kubenswrapper[4753]: I1205 17:34:52.833006 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-66c6899d8d-whfkr" 
Dec 05 17:34:52 crc kubenswrapper[4753]: I1205 17:34:52.835202 4753 generic.go:334] "Generic (PLEG): container finished" podID="daeb7979-ef24-45e4-888f-a917ecbec75f" containerID="6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b" exitCode=0
Dec 05 17:34:52 crc kubenswrapper[4753]: I1205 17:34:52.835266 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" event={"ID":"daeb7979-ef24-45e4-888f-a917ecbec75f","Type":"ContainerDied","Data":"6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b"}
Dec 05 17:34:52 crc kubenswrapper[4753]: I1205 17:34:52.907046 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-66c6899d8d-whfkr" podStartSLOduration=2.90702751 podStartE2EDuration="2.90702751s" podCreationTimestamp="2025-12-05 17:34:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:52.861826817 +0000 UTC m=+1831.364933833" watchObservedRunningTime="2025-12-05 17:34:52.90702751 +0000 UTC m=+1831.410134516"
Dec 05 17:34:53 crc kubenswrapper[4753]: E1205 17:34:53.150863 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0acfa75f_8372_4ccb_a91a_6741854cdd34.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.426037 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7f996df5b6-dlvm2"]
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.428344 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.433072 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.433228 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.442194 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7f996df5b6-dlvm2"]
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.586028 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-internal-tls-certs\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.586216 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-config-data-custom\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.586253 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lw5fz\" (UniqueName: \"kubernetes.io/projected/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-kube-api-access-lw5fz\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.586373 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-combined-ca-bundle\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.586510 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-logs\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.586572 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-public-tls-certs\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.586628 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-config-data\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.688450 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-config-data\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.688530 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-internal-tls-certs\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.688609 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-config-data-custom\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.688636 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lw5fz\" (UniqueName: \"kubernetes.io/projected/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-kube-api-access-lw5fz\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.688675 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-combined-ca-bundle\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.689137 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-logs\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.689543 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-logs\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.689686 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-public-tls-certs\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.697523 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-combined-ca-bundle\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.699668 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-config-data\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.701309 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-config-data-custom\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.702367 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-internal-tls-certs\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.702612 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-public-tls-certs\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.709615 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lw5fz\" (UniqueName: \"kubernetes.io/projected/2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013-kube-api-access-lw5fz\") pod \"barbican-api-7f996df5b6-dlvm2\" (UID: \"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013\") " pod="openstack/barbican-api-7f996df5b6-dlvm2"
Dec 05 17:34:53 crc 
kubenswrapper[4753]: I1205 17:34:53.766968 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7f996df5b6-dlvm2" Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.847136 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-f6c6z" event={"ID":"68a4a494-d4ff-43ee-a74c-4f0377d229ec","Type":"ContainerDied","Data":"6cbd5092e3875836c748f4ff566a0ac135c05f3b80857ed05ccbb2de2bf200d7"} Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.847266 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cbd5092e3875836c748f4ff566a0ac135c05f3b80857ed05ccbb2de2bf200d7" Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.860596 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-9vsjw" event={"ID":"f5711686-6b64-450f-b2b4-6583dab08275","Type":"ContainerDied","Data":"a40dad9343c3f0581c8eed8a3c26c14028f295eba2e08338ebe58184ec8e2568"} Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.860658 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a40dad9343c3f0581c8eed8a3c26c14028f295eba2e08338ebe58184ec8e2568" Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.932957 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.964686 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-f6c6z" Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.996059 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2db6\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-kube-api-access-t2db6\") pod \"f5711686-6b64-450f-b2b4-6583dab08275\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.996213 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-scripts\") pod \"f5711686-6b64-450f-b2b4-6583dab08275\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.996266 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-config-data\") pod \"f5711686-6b64-450f-b2b4-6583dab08275\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.996312 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-combined-ca-bundle\") pod \"f5711686-6b64-450f-b2b4-6583dab08275\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " Dec 05 17:34:53 crc kubenswrapper[4753]: I1205 17:34:53.996358 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-certs\") pod \"f5711686-6b64-450f-b2b4-6583dab08275\" (UID: \"f5711686-6b64-450f-b2b4-6583dab08275\") " Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.002429 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-kube-api-access-t2db6" 
(OuterVolumeSpecName: "kube-api-access-t2db6") pod "f5711686-6b64-450f-b2b4-6583dab08275" (UID: "f5711686-6b64-450f-b2b4-6583dab08275"). InnerVolumeSpecName "kube-api-access-t2db6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.006504 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-scripts" (OuterVolumeSpecName: "scripts") pod "f5711686-6b64-450f-b2b4-6583dab08275" (UID: "f5711686-6b64-450f-b2b4-6583dab08275"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.012277 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-certs" (OuterVolumeSpecName: "certs") pod "f5711686-6b64-450f-b2b4-6583dab08275" (UID: "f5711686-6b64-450f-b2b4-6583dab08275"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.062388 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f5711686-6b64-450f-b2b4-6583dab08275" (UID: "f5711686-6b64-450f-b2b4-6583dab08275"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.070200 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-config-data" (OuterVolumeSpecName: "config-data") pod "f5711686-6b64-450f-b2b4-6583dab08275" (UID: "f5711686-6b64-450f-b2b4-6583dab08275"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098012 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-config-data\") pod \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098092 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t57k6\" (UniqueName: \"kubernetes.io/projected/68a4a494-d4ff-43ee-a74c-4f0377d229ec-kube-api-access-t57k6\") pod \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098172 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/68a4a494-d4ff-43ee-a74c-4f0377d229ec-etc-machine-id\") pod \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098208 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-scripts\") pod \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098238 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-db-sync-config-data\") pod \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098280 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-combined-ca-bundle\") pod \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\" (UID: \"68a4a494-d4ff-43ee-a74c-4f0377d229ec\") " Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098889 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2db6\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-kube-api-access-t2db6\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098917 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098928 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098940 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f5711686-6b64-450f-b2b4-6583dab08275-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.098951 4753 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/f5711686-6b64-450f-b2b4-6583dab08275-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.099204 
4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/68a4a494-d4ff-43ee-a74c-4f0377d229ec-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "68a4a494-d4ff-43ee-a74c-4f0377d229ec" (UID: "68a4a494-d4ff-43ee-a74c-4f0377d229ec"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.102968 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-scripts" (OuterVolumeSpecName: "scripts") pod "68a4a494-d4ff-43ee-a74c-4f0377d229ec" (UID: "68a4a494-d4ff-43ee-a74c-4f0377d229ec"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.103082 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/68a4a494-d4ff-43ee-a74c-4f0377d229ec-kube-api-access-t57k6" (OuterVolumeSpecName: "kube-api-access-t57k6") pod "68a4a494-d4ff-43ee-a74c-4f0377d229ec" (UID: "68a4a494-d4ff-43ee-a74c-4f0377d229ec"). InnerVolumeSpecName "kube-api-access-t57k6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.105496 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "68a4a494-d4ff-43ee-a74c-4f0377d229ec" (UID: "68a4a494-d4ff-43ee-a74c-4f0377d229ec"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.138338 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "68a4a494-d4ff-43ee-a74c-4f0377d229ec" (UID: "68a4a494-d4ff-43ee-a74c-4f0377d229ec"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.160246 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-config-data" (OuterVolumeSpecName: "config-data") pod "68a4a494-d4ff-43ee-a74c-4f0377d229ec" (UID: "68a4a494-d4ff-43ee-a74c-4f0377d229ec"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.201634 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.201666 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t57k6\" (UniqueName: \"kubernetes.io/projected/68a4a494-d4ff-43ee-a74c-4f0377d229ec-kube-api-access-t57k6\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.201678 4753 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/68a4a494-d4ff-43ee-a74c-4f0377d229ec-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.201688 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.201696 4753 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.201704 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68a4a494-d4ff-43ee-a74c-4f0377d229ec-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.269621 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7f996df5b6-dlvm2"] Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.886860 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" event={"ID":"daeb7979-ef24-45e4-888f-a917ecbec75f","Type":"ContainerStarted","Data":"505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609"} Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.888443 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.891214 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69b574cd87-g8xmw" event={"ID":"491b4a49-e02d-41a2-b783-b3dddbedbc57","Type":"ContainerStarted","Data":"40a5d684fcb3a60363033d58bdbec2354e99a332c01738e18b8c2597f8a5fd89"} Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.891267 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-69b574cd87-g8xmw" event={"ID":"491b4a49-e02d-41a2-b783-b3dddbedbc57","Type":"ContainerStarted","Data":"499d583bda1a087c8cbd0429ebbea95083240a974213e29049607fd60cfdb0bd"} Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.903450 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f996df5b6-dlvm2" event={"ID":"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013","Type":"ContainerStarted","Data":"d67b15e74382c9a719f9f4983d719fc5da8c5c9fce79ee7cf9e0dda67be37fe7"} Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.903757 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f996df5b6-dlvm2" 
event={"ID":"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013","Type":"ContainerStarted","Data":"a32bd78ecaac3d0180926f8af65637ab716db0cc5091f68e516d690397bb72e6"} Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.903849 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7f996df5b6-dlvm2" event={"ID":"2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013","Type":"ContainerStarted","Data":"b0c98ad5131cb7d0187db2756b779fd079dee064e6205815b7857bc2c26f23ae"} Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.904345 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7f996df5b6-dlvm2" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.905420 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7f996df5b6-dlvm2" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.914197 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-9vsjw" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.917010 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" event={"ID":"c272889e-62f7-4ce2-8a38-e15945d984d9","Type":"ContainerStarted","Data":"8df344783ca372f4a4eaeae909edbcf3451fc881f380f619205811eca43cd390"} Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.917098 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" event={"ID":"c272889e-62f7-4ce2-8a38-e15945d984d9","Type":"ContainerStarted","Data":"443e318cc68c66c546d6f7b272eda31d8b73bf4585100934c7c79207b7232fc9"} Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.917273 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-f6c6z" Dec 05 17:34:54 crc kubenswrapper[4753]: I1205 17:34:54.934604 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" podStartSLOduration=4.93457574 podStartE2EDuration="4.93457574s" podCreationTimestamp="2025-12-05 17:34:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:54.919485542 +0000 UTC m=+1833.422592558" watchObservedRunningTime="2025-12-05 17:34:54.93457574 +0000 UTC m=+1833.437682766" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.012467 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-69b574cd87-g8xmw" podStartSLOduration=2.4417158 podStartE2EDuration="5.012446231s" podCreationTimestamp="2025-12-05 17:34:50 +0000 UTC" firstStartedPulling="2025-12-05 17:34:51.104129777 +0000 UTC m=+1829.607236783" lastFinishedPulling="2025-12-05 17:34:53.674860188 +0000 UTC m=+1832.177967214" observedRunningTime="2025-12-05 17:34:54.962617726 +0000 UTC m=+1833.465724732" watchObservedRunningTime="2025-12-05 17:34:55.012446231 +0000 UTC m=+1833.515553237" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.014499 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-58b544895d-h2wcr" podStartSLOduration=2.555212792 podStartE2EDuration="5.014493349s" podCreationTimestamp="2025-12-05 17:34:50 +0000 UTC" firstStartedPulling="2025-12-05 17:34:51.215609932 +0000 UTC m=+1829.718716928" lastFinishedPulling="2025-12-05 17:34:53.674890479 +0000 UTC m=+1832.177997485" observedRunningTime="2025-12-05 17:34:54.984288161 +0000 UTC m=+1833.487395167" watchObservedRunningTime="2025-12-05 17:34:55.014493349 +0000 UTC m=+1833.517600345" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.055331 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7f996df5b6-dlvm2" podStartSLOduration=2.055311718 podStartE2EDuration="2.055311718s" podCreationTimestamp="2025-12-05 17:34:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:55.02652833 +0000 UTC m=+1833.529635336" watchObservedRunningTime="2025-12-05 17:34:55.055311718 +0000 UTC m=+1833.558418734" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.174540 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-k8xt5"] Dec 05 17:34:55 crc kubenswrapper[4753]: E1205 17:34:55.175038 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="68a4a494-d4ff-43ee-a74c-4f0377d229ec" containerName="cinder-db-sync" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.175064 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="68a4a494-d4ff-43ee-a74c-4f0377d229ec" containerName="cinder-db-sync" Dec 05 17:34:55 crc kubenswrapper[4753]: E1205 17:34:55.175091 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5711686-6b64-450f-b2b4-6583dab08275" containerName="cloudkitty-db-sync" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.175100 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5711686-6b64-450f-b2b4-6583dab08275" containerName="cloudkitty-db-sync" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.175334 4753 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f5711686-6b64-450f-b2b4-6583dab08275" containerName="cloudkitty-db-sync" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.175363 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="68a4a494-d4ff-43ee-a74c-4f0377d229ec" containerName="cinder-db-sync" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.176130 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.180630 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.180991 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.181098 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.181234 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.181113 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-45bq2" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.189965 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-k8xt5"] Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.303458 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.309040 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.311499 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.313050 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-qtvrt" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.313311 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.313725 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.317605 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.325962 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-combined-ca-bundle\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.326188 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-scripts\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.326321 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-certs\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.326407 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvs2c\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-kube-api-access-wvs2c\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.326525 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-config-data\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.413178 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-gnhfq"] Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428158 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvs2c\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-kube-api-access-wvs2c\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428211 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428248 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-scripts\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428308 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428331 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-config-data\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428372 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-combined-ca-bundle\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") 
" pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428418 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-scripts\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428440 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428485 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94jzd\" (UniqueName: \"kubernetes.io/projected/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-kube-api-access-94jzd\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428505 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.428529 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-certs\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.436372 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-scripts\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.436956 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-combined-ca-bundle\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.446866 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-certs\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.454972 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-config-data\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.456771 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wvs2c\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-kube-api-access-wvs2c\") pod \"cloudkitty-storageinit-k8xt5\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.490663 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-m2665"] Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.494057 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.510259 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-m2665"] Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.512553 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.536351 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.538200 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.540110 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.540259 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-scripts\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.540405 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.540623 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.540731 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94jzd\" (UniqueName: \"kubernetes.io/projected/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-kube-api-access-94jzd\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.540771 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc 
kubenswrapper[4753]: I1205 17:34:55.544582 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.546070 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.556742 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.558288 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.572876 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-scripts\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.573476 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.574096 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.578104 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94jzd\" (UniqueName: \"kubernetes.io/projected/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-kube-api-access-94jzd\") pod \"cinder-scheduler-0\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.629591 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642412 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94044a55-bd43-4551-a012-fc2c64f9e714-etc-machine-id\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642509 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642550 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8lw6\" (UniqueName: \"kubernetes.io/projected/484f9ea6-0202-42e7-bcf5-619d5b3176b4-kube-api-access-g8lw6\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642571 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642591 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642606 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data-custom\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642637 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642654 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642674 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-combined-ca-bundle\") pod \"cinder-api-0\" (UID: 
\"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642712 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-config\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642735 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94044a55-bd43-4551-a012-fc2c64f9e714-logs\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642754 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rlmf\" (UniqueName: \"kubernetes.io/projected/94044a55-bd43-4551-a012-fc2c64f9e714-kube-api-access-4rlmf\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.642775 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-scripts\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.744829 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94044a55-bd43-4551-a012-fc2c64f9e714-etc-machine-id\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745247 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745293 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8lw6\" (UniqueName: \"kubernetes.io/projected/484f9ea6-0202-42e7-bcf5-619d5b3176b4-kube-api-access-g8lw6\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745315 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745337 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 
17:34:55.745354 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data-custom\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745385 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745405 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745424 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745463 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-config\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745491 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94044a55-bd43-4551-a012-fc2c64f9e714-logs\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745511 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rlmf\" (UniqueName: \"kubernetes.io/projected/94044a55-bd43-4551-a012-fc2c64f9e714-kube-api-access-4rlmf\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.745538 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-scripts\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.747697 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94044a55-bd43-4551-a012-fc2c64f9e714-etc-machine-id\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.750610 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94044a55-bd43-4551-a012-fc2c64f9e714-logs\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: 
I1205 17:34:55.753729 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data-custom\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.754585 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-scripts\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.754964 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-config\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.755091 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.758228 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.761176 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.764088 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.764451 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.771915 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.773825 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8lw6\" (UniqueName: \"kubernetes.io/projected/484f9ea6-0202-42e7-bcf5-619d5b3176b4-kube-api-access-g8lw6\") pod \"dnsmasq-dns-6bb4fc677f-m2665\" (UID: 
\"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") " pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.798891 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rlmf\" (UniqueName: \"kubernetes.io/projected/94044a55-bd43-4551-a012-fc2c64f9e714-kube-api-access-4rlmf\") pod \"cinder-api-0\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " pod="openstack/cinder-api-0" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.965708 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:55 crc kubenswrapper[4753]: I1205 17:34:55.982635 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.122678 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-k8xt5"] Dec 05 17:34:56 crc kubenswrapper[4753]: W1205 17:34:56.129715 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod855d7063_d23a_4a28_a652_e78ae24fcc27.slice/crio-db73807b48e1ced827dd80666d16bd3587caa5eda74776727ff9dee9bf8fbc99 WatchSource:0}: Error finding container db73807b48e1ced827dd80666d16bd3587caa5eda74776727ff9dee9bf8fbc99: Status 404 returned error can't find the container with id db73807b48e1ced827dd80666d16bd3587caa5eda74776727ff9dee9bf8fbc99 Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.422241 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:34:56 crc kubenswrapper[4753]: W1205 17:34:56.423977 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda73e4ddc_fbbe_4b4c_8b71_ba91b7ebb48a.slice/crio-c27db363a9a367b5cbec0d7b76a4d7098639d316ef155bfce61ba8eadd04e9d5 WatchSource:0}: Error finding container c27db363a9a367b5cbec0d7b76a4d7098639d316ef155bfce61ba8eadd04e9d5: Status 404 returned error can't find the container with id c27db363a9a367b5cbec0d7b76a4d7098639d316ef155bfce61ba8eadd04e9d5 Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.633246 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:34:56 crc kubenswrapper[4753]: W1205 17:34:56.633599 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94044a55_bd43_4551_a012_fc2c64f9e714.slice/crio-27871c09e1e56c82f614e94e2c499a31794139e328c81ab8ae4bda7c67f19e63 WatchSource:0}: Error finding container 27871c09e1e56c82f614e94e2c499a31794139e328c81ab8ae4bda7c67f19e63: Status 404 returned error can't find the container with id 27871c09e1e56c82f614e94e2c499a31794139e328c81ab8ae4bda7c67f19e63 Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.787404 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-m2665"] Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.810974 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6d4f9bcbff-wqwgp" Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.878199 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-c57d65f96-d2bw4"] Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.878506 4753 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/neutron-c57d65f96-d2bw4" podUID="c69accdb-191f-4ea8-905c-308492e5c663" containerName="neutron-api" containerID="cri-o://1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6" gracePeriod=30 Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.878949 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-c57d65f96-d2bw4" podUID="c69accdb-191f-4ea8-905c-308492e5c663" containerName="neutron-httpd" containerID="cri-o://c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69" gracePeriod=30 Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.947649 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94044a55-bd43-4551-a012-fc2c64f9e714","Type":"ContainerStarted","Data":"27871c09e1e56c82f614e94e2c499a31794139e328c81ab8ae4bda7c67f19e63"} Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.949971 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" event={"ID":"484f9ea6-0202-42e7-bcf5-619d5b3176b4","Type":"ContainerStarted","Data":"4b0e20daafb9374fcbdc96a640c696e84e161558a6753411ea51e75778b66c89"} Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.952730 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-k8xt5" event={"ID":"855d7063-d23a-4a28-a652-e78ae24fcc27","Type":"ContainerStarted","Data":"c5f58392ec576df89b4f9cd989a1fb83d6ddbae0091ba5b8bc4fbf6c2b5a7364"} Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.952778 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-k8xt5" event={"ID":"855d7063-d23a-4a28-a652-e78ae24fcc27","Type":"ContainerStarted","Data":"db73807b48e1ced827dd80666d16bd3587caa5eda74776727ff9dee9bf8fbc99"} Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.956947 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a","Type":"ContainerStarted","Data":"c27db363a9a367b5cbec0d7b76a4d7098639d316ef155bfce61ba8eadd04e9d5"} Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.956984 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" podUID="daeb7979-ef24-45e4-888f-a917ecbec75f" containerName="dnsmasq-dns" containerID="cri-o://505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609" gracePeriod=10 Dec 05 17:34:56 crc kubenswrapper[4753]: I1205 17:34:56.985001 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-k8xt5" podStartSLOduration=1.98498744 podStartE2EDuration="1.98498744s" podCreationTimestamp="2025-12-05 17:34:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:56.984472635 +0000 UTC m=+1835.487579641" watchObservedRunningTime="2025-12-05 17:34:56.98498744 +0000 UTC m=+1835.488094446" Dec 05 17:34:57 crc kubenswrapper[4753]: I1205 17:34:57.849011 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:57 crc kubenswrapper[4753]: I1205 17:34:57.946883 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-config\") pod \"daeb7979-ef24-45e4-888f-a917ecbec75f\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " Dec 05 17:34:57 crc kubenswrapper[4753]: I1205 17:34:57.946928 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-nb\") pod \"daeb7979-ef24-45e4-888f-a917ecbec75f\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " Dec 05 17:34:57 crc kubenswrapper[4753]: I1205 17:34:57.948927 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ph574\" (UniqueName: \"kubernetes.io/projected/daeb7979-ef24-45e4-888f-a917ecbec75f-kube-api-access-ph574\") pod \"daeb7979-ef24-45e4-888f-a917ecbec75f\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " Dec 05 17:34:57 crc kubenswrapper[4753]: I1205 17:34:57.949286 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-svc\") pod \"daeb7979-ef24-45e4-888f-a917ecbec75f\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " Dec 05 17:34:57 crc kubenswrapper[4753]: I1205 17:34:57.949359 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-sb\") pod \"daeb7979-ef24-45e4-888f-a917ecbec75f\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " Dec 05 17:34:57 crc kubenswrapper[4753]: I1205 17:34:57.949413 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-swift-storage-0\") pod \"daeb7979-ef24-45e4-888f-a917ecbec75f\" (UID: \"daeb7979-ef24-45e4-888f-a917ecbec75f\") " Dec 05 17:34:57 crc kubenswrapper[4753]: I1205 17:34:57.978090 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/daeb7979-ef24-45e4-888f-a917ecbec75f-kube-api-access-ph574" (OuterVolumeSpecName: "kube-api-access-ph574") pod "daeb7979-ef24-45e4-888f-a917ecbec75f" (UID: "daeb7979-ef24-45e4-888f-a917ecbec75f"). InnerVolumeSpecName "kube-api-access-ph574". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:34:57 crc kubenswrapper[4753]: I1205 17:34:57.998762 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94044a55-bd43-4551-a012-fc2c64f9e714","Type":"ContainerStarted","Data":"10d8fede81dac32513f5c16fab3c843932c0827c2c6ba51f4ae15becabf70038"} Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.008746 4753 generic.go:334] "Generic (PLEG): container finished" podID="484f9ea6-0202-42e7-bcf5-619d5b3176b4" containerID="b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d" exitCode=0 Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.008807 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" event={"ID":"484f9ea6-0202-42e7-bcf5-619d5b3176b4","Type":"ContainerDied","Data":"b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d"} Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.013429 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "daeb7979-ef24-45e4-888f-a917ecbec75f" (UID: "daeb7979-ef24-45e4-888f-a917ecbec75f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.016945 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-config" (OuterVolumeSpecName: "config") pod "daeb7979-ef24-45e4-888f-a917ecbec75f" (UID: "daeb7979-ef24-45e4-888f-a917ecbec75f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.018210 4753 generic.go:334] "Generic (PLEG): container finished" podID="daeb7979-ef24-45e4-888f-a917ecbec75f" containerID="505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609" exitCode=0 Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.018306 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" event={"ID":"daeb7979-ef24-45e4-888f-a917ecbec75f","Type":"ContainerDied","Data":"505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609"} Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.018387 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.018527 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-gnhfq" event={"ID":"daeb7979-ef24-45e4-888f-a917ecbec75f","Type":"ContainerDied","Data":"6aae9b1bd7302abfe5975f2a289519d21f0d8bb714682e375d9f647033cadc05"} Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.018552 4753 scope.go:117] "RemoveContainer" containerID="505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.039796 4753 generic.go:334] "Generic (PLEG): container finished" podID="c69accdb-191f-4ea8-905c-308492e5c663" containerID="c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69" exitCode=0 Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.040838 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c57d65f96-d2bw4" event={"ID":"c69accdb-191f-4ea8-905c-308492e5c663","Type":"ContainerDied","Data":"c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69"} Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.052431 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ph574\" (UniqueName: \"kubernetes.io/projected/daeb7979-ef24-45e4-888f-a917ecbec75f-kube-api-access-ph574\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.052468 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.052481 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.060580 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "daeb7979-ef24-45e4-888f-a917ecbec75f" (UID: "daeb7979-ef24-45e4-888f-a917ecbec75f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.078827 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "daeb7979-ef24-45e4-888f-a917ecbec75f" (UID: "daeb7979-ef24-45e4-888f-a917ecbec75f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.110670 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "daeb7979-ef24-45e4-888f-a917ecbec75f" (UID: "daeb7979-ef24-45e4-888f-a917ecbec75f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.154974 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.155007 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.155018 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/daeb7979-ef24-45e4-888f-a917ecbec75f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.162216 4753 scope.go:117] "RemoveContainer" containerID="6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.230276 4753 scope.go:117] "RemoveContainer" containerID="505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609" Dec 05 17:34:58 crc kubenswrapper[4753]: E1205 17:34:58.231685 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609\": container with ID starting with 505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609 not found: ID does not exist" containerID="505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.231730 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609"} err="failed to get container status \"505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609\": rpc error: code = NotFound desc = could not find container \"505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609\": container with ID starting with 505a5c96eb831fd977437fb9178c97cc79c49886dc5df04c04f06d32b626a609 not found: ID does not exist" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.231761 4753 scope.go:117] "RemoveContainer" containerID="6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b" Dec 05 17:34:58 crc kubenswrapper[4753]: E1205 17:34:58.234537 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b\": container with ID starting with 6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b not found: ID does not exist" containerID="6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b" Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.234608 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b"} err="failed to get container status \"6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b\": rpc error: code = NotFound desc = could not find container \"6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b\": container with ID starting with 6b165c416932a31c920301856cfa1d870b5fef69dd15a4fa75e1632504ff700b not found: ID does not exist" Dec 05 17:34:58 crc kubenswrapper[4753]: 
I1205 17:34:58.325653 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.353772 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-gnhfq"] Dec 05 17:34:58 crc kubenswrapper[4753]: I1205 17:34:58.362545 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-gnhfq"] Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 17:34:59.069199 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94044a55-bd43-4551-a012-fc2c64f9e714","Type":"ContainerStarted","Data":"ab79a325f4ebdba589abd5a4fcf00ea792b019524e148e90a62c726d40f20651"} Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 17:34:59.069745 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="94044a55-bd43-4551-a012-fc2c64f9e714" containerName="cinder-api-log" containerID="cri-o://10d8fede81dac32513f5c16fab3c843932c0827c2c6ba51f4ae15becabf70038" gracePeriod=30 Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 17:34:59.070071 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 17:34:59.070330 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="94044a55-bd43-4551-a012-fc2c64f9e714" containerName="cinder-api" containerID="cri-o://ab79a325f4ebdba589abd5a4fcf00ea792b019524e148e90a62c726d40f20651" gracePeriod=30 Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 17:34:59.076621 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" event={"ID":"484f9ea6-0202-42e7-bcf5-619d5b3176b4","Type":"ContainerStarted","Data":"22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe"} Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 17:34:59.085623 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 17:34:59.104971 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.104947154 podStartE2EDuration="4.104947154s" podCreationTimestamp="2025-12-05 17:34:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:59.103288686 +0000 UTC m=+1837.606395692" watchObservedRunningTime="2025-12-05 17:34:59.104947154 +0000 UTC m=+1837.608054160" Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 17:34:59.125339 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" podStartSLOduration=4.125322092 podStartE2EDuration="4.125322092s" podCreationTimestamp="2025-12-05 17:34:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:59.123912062 +0000 UTC m=+1837.627019108" watchObservedRunningTime="2025-12-05 17:34:59.125322092 +0000 UTC m=+1837.628429098" Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 17:34:59.127028 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a","Type":"ContainerStarted","Data":"de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf"} Dec 05 17:34:59 crc kubenswrapper[4753]: I1205 
17:34:59.738432 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="daeb7979-ef24-45e4-888f-a917ecbec75f" path="/var/lib/kubelet/pods/daeb7979-ef24-45e4-888f-a917ecbec75f/volumes" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.175313 4753 generic.go:334] "Generic (PLEG): container finished" podID="94044a55-bd43-4551-a012-fc2c64f9e714" containerID="ab79a325f4ebdba589abd5a4fcf00ea792b019524e148e90a62c726d40f20651" exitCode=0 Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.175557 4753 generic.go:334] "Generic (PLEG): container finished" podID="94044a55-bd43-4551-a012-fc2c64f9e714" containerID="10d8fede81dac32513f5c16fab3c843932c0827c2c6ba51f4ae15becabf70038" exitCode=143 Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.175607 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94044a55-bd43-4551-a012-fc2c64f9e714","Type":"ContainerDied","Data":"ab79a325f4ebdba589abd5a4fcf00ea792b019524e148e90a62c726d40f20651"} Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.175636 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94044a55-bd43-4551-a012-fc2c64f9e714","Type":"ContainerDied","Data":"10d8fede81dac32513f5c16fab3c843932c0827c2c6ba51f4ae15becabf70038"} Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.178526 4753 generic.go:334] "Generic (PLEG): container finished" podID="855d7063-d23a-4a28-a652-e78ae24fcc27" containerID="c5f58392ec576df89b4f9cd989a1fb83d6ddbae0091ba5b8bc4fbf6c2b5a7364" exitCode=0 Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.178590 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-k8xt5" event={"ID":"855d7063-d23a-4a28-a652-e78ae24fcc27","Type":"ContainerDied","Data":"c5f58392ec576df89b4f9cd989a1fb83d6ddbae0091ba5b8bc4fbf6c2b5a7364"} Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.183218 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a","Type":"ContainerStarted","Data":"d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94"} Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.220538 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.344474613 podStartE2EDuration="5.220522514s" podCreationTimestamp="2025-12-05 17:34:55 +0000 UTC" firstStartedPulling="2025-12-05 17:34:56.429780418 +0000 UTC m=+1834.932887424" lastFinishedPulling="2025-12-05 17:34:57.305828319 +0000 UTC m=+1835.808935325" observedRunningTime="2025-12-05 17:35:00.214595606 +0000 UTC m=+1838.717702612" watchObservedRunningTime="2025-12-05 17:35:00.220522514 +0000 UTC m=+1838.723629520" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.240853 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.315535 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data\") pod \"94044a55-bd43-4551-a012-fc2c64f9e714\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.315661 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-scripts\") pod \"94044a55-bd43-4551-a012-fc2c64f9e714\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.315688 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94044a55-bd43-4551-a012-fc2c64f9e714-logs\") pod \"94044a55-bd43-4551-a012-fc2c64f9e714\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.316299 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data-custom\") pod \"94044a55-bd43-4551-a012-fc2c64f9e714\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.316617 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94044a55-bd43-4551-a012-fc2c64f9e714-etc-machine-id\") pod \"94044a55-bd43-4551-a012-fc2c64f9e714\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.316663 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-combined-ca-bundle\") pod \"94044a55-bd43-4551-a012-fc2c64f9e714\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.316684 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rlmf\" (UniqueName: \"kubernetes.io/projected/94044a55-bd43-4551-a012-fc2c64f9e714-kube-api-access-4rlmf\") pod \"94044a55-bd43-4551-a012-fc2c64f9e714\" (UID: \"94044a55-bd43-4551-a012-fc2c64f9e714\") " Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.323949 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/94044a55-bd43-4551-a012-fc2c64f9e714-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "94044a55-bd43-4551-a012-fc2c64f9e714" (UID: "94044a55-bd43-4551-a012-fc2c64f9e714"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.324245 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/94044a55-bd43-4551-a012-fc2c64f9e714-logs" (OuterVolumeSpecName: "logs") pod "94044a55-bd43-4551-a012-fc2c64f9e714" (UID: "94044a55-bd43-4551-a012-fc2c64f9e714"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.331397 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94044a55-bd43-4551-a012-fc2c64f9e714-kube-api-access-4rlmf" (OuterVolumeSpecName: "kube-api-access-4rlmf") pod "94044a55-bd43-4551-a012-fc2c64f9e714" (UID: "94044a55-bd43-4551-a012-fc2c64f9e714"). InnerVolumeSpecName "kube-api-access-4rlmf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.332254 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "94044a55-bd43-4551-a012-fc2c64f9e714" (UID: "94044a55-bd43-4551-a012-fc2c64f9e714"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.357321 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94044a55-bd43-4551-a012-fc2c64f9e714" (UID: "94044a55-bd43-4551-a012-fc2c64f9e714"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.390006 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-scripts" (OuterVolumeSpecName: "scripts") pod "94044a55-bd43-4551-a012-fc2c64f9e714" (UID: "94044a55-bd43-4551-a012-fc2c64f9e714"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.419277 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data" (OuterVolumeSpecName: "config-data") pod "94044a55-bd43-4551-a012-fc2c64f9e714" (UID: "94044a55-bd43-4551-a012-fc2c64f9e714"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.420610 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.420631 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.420640 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/94044a55-bd43-4551-a012-fc2c64f9e714-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.420648 4753 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.420660 4753 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/94044a55-bd43-4551-a012-fc2c64f9e714-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.420668 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94044a55-bd43-4551-a012-fc2c64f9e714-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.420678 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rlmf\" (UniqueName: \"kubernetes.io/projected/94044a55-bd43-4551-a012-fc2c64f9e714-kube-api-access-4rlmf\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:00 crc kubenswrapper[4753]: I1205 17:35:00.631220 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.196086 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.199516 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"94044a55-bd43-4551-a012-fc2c64f9e714","Type":"ContainerDied","Data":"27871c09e1e56c82f614e94e2c499a31794139e328c81ab8ae4bda7c67f19e63"} Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.199603 4753 scope.go:117] "RemoveContainer" containerID="ab79a325f4ebdba589abd5a4fcf00ea792b019524e148e90a62c726d40f20651" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.242679 4753 scope.go:117] "RemoveContainer" containerID="10d8fede81dac32513f5c16fab3c843932c0827c2c6ba51f4ae15becabf70038" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.248470 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.267287 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.279622 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:35:01 crc kubenswrapper[4753]: E1205 17:35:01.280130 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daeb7979-ef24-45e4-888f-a917ecbec75f" containerName="dnsmasq-dns" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.280175 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="daeb7979-ef24-45e4-888f-a917ecbec75f" containerName="dnsmasq-dns" Dec 05 17:35:01 crc kubenswrapper[4753]: E1205 17:35:01.280215 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94044a55-bd43-4551-a012-fc2c64f9e714" containerName="cinder-api-log" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.280225 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="94044a55-bd43-4551-a012-fc2c64f9e714" containerName="cinder-api-log" Dec 05 17:35:01 crc kubenswrapper[4753]: E1205 17:35:01.280243 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="daeb7979-ef24-45e4-888f-a917ecbec75f" containerName="init" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.280253 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="daeb7979-ef24-45e4-888f-a917ecbec75f" containerName="init" Dec 05 17:35:01 crc kubenswrapper[4753]: E1205 17:35:01.280284 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94044a55-bd43-4551-a012-fc2c64f9e714" containerName="cinder-api" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.280312 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="94044a55-bd43-4551-a012-fc2c64f9e714" containerName="cinder-api" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.280582 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="94044a55-bd43-4551-a012-fc2c64f9e714" containerName="cinder-api" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.280615 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="daeb7979-ef24-45e4-888f-a917ecbec75f" containerName="dnsmasq-dns" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.280630 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="94044a55-bd43-4551-a012-fc2c64f9e714" containerName="cinder-api-log" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.282111 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.285553 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.285841 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.285951 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.310998 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.340499 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-scripts\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.340544 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.340590 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-config-data\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.340614 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64fsr\" (UniqueName: \"kubernetes.io/projected/8adad7e9-de7d-440a-9ac9-55882e2fd944-kube-api-access-64fsr\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.340657 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-config-data-custom\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.340676 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.340706 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.340747 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/8adad7e9-de7d-440a-9ac9-55882e2fd944-logs\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.340805 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8adad7e9-de7d-440a-9ac9-55882e2fd944-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445403 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-scripts\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445461 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445504 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-config-data\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445544 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64fsr\" (UniqueName: \"kubernetes.io/projected/8adad7e9-de7d-440a-9ac9-55882e2fd944-kube-api-access-64fsr\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445586 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-config-data-custom\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445606 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445640 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445691 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8adad7e9-de7d-440a-9ac9-55882e2fd944-logs\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445760 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" 
(UniqueName: \"kubernetes.io/host-path/8adad7e9-de7d-440a-9ac9-55882e2fd944-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.445889 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8adad7e9-de7d-440a-9ac9-55882e2fd944-etc-machine-id\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.447685 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8adad7e9-de7d-440a-9ac9-55882e2fd944-logs\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.453538 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-scripts\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.454966 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-config-data-custom\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.457803 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-public-tls-certs\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.457935 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.462161 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.463067 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8adad7e9-de7d-440a-9ac9-55882e2fd944-config-data\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.467857 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64fsr\" (UniqueName: \"kubernetes.io/projected/8adad7e9-de7d-440a-9ac9-55882e2fd944-kube-api-access-64fsr\") pod \"cinder-api-0\" (UID: \"8adad7e9-de7d-440a-9ac9-55882e2fd944\") " pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.606660 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.745906 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94044a55-bd43-4551-a012-fc2c64f9e714" path="/var/lib/kubelet/pods/94044a55-bd43-4551-a012-fc2c64f9e714/volumes" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.794038 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.866703 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-config-data\") pod \"855d7063-d23a-4a28-a652-e78ae24fcc27\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.866803 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-scripts\") pod \"855d7063-d23a-4a28-a652-e78ae24fcc27\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.866881 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvs2c\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-kube-api-access-wvs2c\") pod \"855d7063-d23a-4a28-a652-e78ae24fcc27\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.867070 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-combined-ca-bundle\") pod \"855d7063-d23a-4a28-a652-e78ae24fcc27\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.867103 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-certs\") pod \"855d7063-d23a-4a28-a652-e78ae24fcc27\" (UID: \"855d7063-d23a-4a28-a652-e78ae24fcc27\") " Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.873664 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-kube-api-access-wvs2c" (OuterVolumeSpecName: "kube-api-access-wvs2c") pod "855d7063-d23a-4a28-a652-e78ae24fcc27" (UID: "855d7063-d23a-4a28-a652-e78ae24fcc27"). InnerVolumeSpecName "kube-api-access-wvs2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.884500 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-scripts" (OuterVolumeSpecName: "scripts") pod "855d7063-d23a-4a28-a652-e78ae24fcc27" (UID: "855d7063-d23a-4a28-a652-e78ae24fcc27"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.884560 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-certs" (OuterVolumeSpecName: "certs") pod "855d7063-d23a-4a28-a652-e78ae24fcc27" (UID: "855d7063-d23a-4a28-a652-e78ae24fcc27"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.906748 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "855d7063-d23a-4a28-a652-e78ae24fcc27" (UID: "855d7063-d23a-4a28-a652-e78ae24fcc27"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.907364 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-config-data" (OuterVolumeSpecName: "config-data") pod "855d7063-d23a-4a28-a652-e78ae24fcc27" (UID: "855d7063-d23a-4a28-a652-e78ae24fcc27"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.969711 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.969742 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvs2c\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-kube-api-access-wvs2c\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.969756 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.969765 4753 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/855d7063-d23a-4a28-a652-e78ae24fcc27-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:01 crc kubenswrapper[4753]: I1205 17:35:01.969774 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/855d7063-d23a-4a28-a652-e78ae24fcc27-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.171700 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.255378 4753 generic.go:334] "Generic (PLEG): container finished" podID="c69accdb-191f-4ea8-905c-308492e5c663" containerID="1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6" exitCode=0 Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.255515 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c57d65f96-d2bw4" event={"ID":"c69accdb-191f-4ea8-905c-308492e5c663","Type":"ContainerDied","Data":"1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6"} Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.255552 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-c57d65f96-d2bw4" event={"ID":"c69accdb-191f-4ea8-905c-308492e5c663","Type":"ContainerDied","Data":"cc202ebae752a8f9147c2a29aa88d72437cad32f7fa4a5d208c3217417e57035"} Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.255586 4753 scope.go:117] "RemoveContainer" containerID="c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.255813 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-c57d65f96-d2bw4" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.263252 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-k8xt5" event={"ID":"855d7063-d23a-4a28-a652-e78ae24fcc27","Type":"ContainerDied","Data":"db73807b48e1ced827dd80666d16bd3587caa5eda74776727ff9dee9bf8fbc99"} Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.263397 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db73807b48e1ced827dd80666d16bd3587caa5eda74776727ff9dee9bf8fbc99" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.263279 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-k8xt5" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.277249 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-config\") pod \"c69accdb-191f-4ea8-905c-308492e5c663\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.277567 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-httpd-config\") pod \"c69accdb-191f-4ea8-905c-308492e5c663\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.277800 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rllgw\" (UniqueName: \"kubernetes.io/projected/c69accdb-191f-4ea8-905c-308492e5c663-kube-api-access-rllgw\") pod \"c69accdb-191f-4ea8-905c-308492e5c663\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.277890 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-ovndb-tls-certs\") pod \"c69accdb-191f-4ea8-905c-308492e5c663\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.278355 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-combined-ca-bundle\") pod \"c69accdb-191f-4ea8-905c-308492e5c663\" (UID: \"c69accdb-191f-4ea8-905c-308492e5c663\") " Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.284519 4753 scope.go:117] "RemoveContainer" containerID="1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.287660 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "c69accdb-191f-4ea8-905c-308492e5c663" (UID: "c69accdb-191f-4ea8-905c-308492e5c663"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.288122 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c69accdb-191f-4ea8-905c-308492e5c663-kube-api-access-rllgw" (OuterVolumeSpecName: "kube-api-access-rllgw") pod "c69accdb-191f-4ea8-905c-308492e5c663" (UID: "c69accdb-191f-4ea8-905c-308492e5c663"). InnerVolumeSpecName "kube-api-access-rllgw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.323054 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.354427 4753 scope.go:117] "RemoveContainer" containerID="c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69" Dec 05 17:35:02 crc kubenswrapper[4753]: E1205 17:35:02.355436 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69\": container with ID starting with c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69 not found: ID does not exist" containerID="c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.355623 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69"} err="failed to get container status \"c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69\": rpc error: code = NotFound desc = could not find container \"c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69\": container with ID starting with c592035b030be2d64129c175ef02f565c911e76af14dc1910fe0d629104a3d69 not found: ID does not exist" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.355716 4753 scope.go:117] "RemoveContainer" containerID="1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6" Dec 05 17:35:02 crc kubenswrapper[4753]: E1205 17:35:02.355999 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6\": container with ID starting with 1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6 not found: ID does not exist" containerID="1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.356076 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6"} err="failed to get container status \"1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6\": rpc error: code = NotFound desc = could not find container \"1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6\": container with ID starting with 1ba8238bf495bb097c80467c828e210dfe78a57fcde23ab5146d7473dd1d52b6 not found: ID does not exist" Dec 05 17:35:02 crc kubenswrapper[4753]: W1205 17:35:02.357241 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8adad7e9_de7d_440a_9ac9_55882e2fd944.slice/crio-cb55397b1e8dd1a10d9b2c2e5ab04f7651d3ab40147ccc4c6593aeb1c7c90ed7 WatchSource:0}: Error finding container cb55397b1e8dd1a10d9b2c2e5ab04f7651d3ab40147ccc4c6593aeb1c7c90ed7: Status 404 returned error can't find the container with id cb55397b1e8dd1a10d9b2c2e5ab04f7651d3ab40147ccc4c6593aeb1c7c90ed7 Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.384100 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-config" (OuterVolumeSpecName: "config") pod "c69accdb-191f-4ea8-905c-308492e5c663" (UID: "c69accdb-191f-4ea8-905c-308492e5c663"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.384238 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rllgw\" (UniqueName: \"kubernetes.io/projected/c69accdb-191f-4ea8-905c-308492e5c663-kube-api-access-rllgw\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.385188 4753 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.423597 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:35:02 crc kubenswrapper[4753]: E1205 17:35:02.439014 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="855d7063-d23a-4a28-a652-e78ae24fcc27" containerName="cloudkitty-storageinit" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.439047 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="855d7063-d23a-4a28-a652-e78ae24fcc27" containerName="cloudkitty-storageinit" Dec 05 17:35:02 crc kubenswrapper[4753]: E1205 17:35:02.439090 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c69accdb-191f-4ea8-905c-308492e5c663" containerName="neutron-httpd" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.439100 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c69accdb-191f-4ea8-905c-308492e5c663" containerName="neutron-httpd" Dec 05 17:35:02 crc kubenswrapper[4753]: E1205 17:35:02.439113 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c69accdb-191f-4ea8-905c-308492e5c663" containerName="neutron-api" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.439119 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c69accdb-191f-4ea8-905c-308492e5c663" containerName="neutron-api" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.439324 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="855d7063-d23a-4a28-a652-e78ae24fcc27" containerName="cloudkitty-storageinit" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.439348 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c69accdb-191f-4ea8-905c-308492e5c663" containerName="neutron-api" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.439358 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c69accdb-191f-4ea8-905c-308492e5c663" containerName="neutron-httpd" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.440325 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.448172 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.448239 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "c69accdb-191f-4ea8-905c-308492e5c663" (UID: "c69accdb-191f-4ea8-905c-308492e5c663"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.448404 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.448516 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.448627 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-45bq2" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.451762 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.454571 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.472928 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c69accdb-191f-4ea8-905c-308492e5c663" (UID: "c69accdb-191f-4ea8-905c-308492e5c663"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.478276 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-m2665"] Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.478567 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" podUID="484f9ea6-0202-42e7-bcf5-619d5b3176b4" containerName="dnsmasq-dns" containerID="cri-o://22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe" gracePeriod=10 Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.488565 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.488650 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llhlx\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-kube-api-access-llhlx\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.488681 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-certs\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.488788 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.488839 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-scripts\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.488909 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.489021 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.489037 4753 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.489050 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69accdb-191f-4ea8-905c-308492e5c663-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.565346 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-4k4sh"] Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.575977 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-4k4sh"] Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.576083 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.590934 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.591029 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.591076 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llhlx\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-kube-api-access-llhlx\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.591099 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-certs\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.591186 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.591216 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-scripts\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.596039 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.598541 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-certs\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.598668 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.600971 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.602343 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-scripts\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.642723 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llhlx\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-kube-api-access-llhlx\") pod \"cloudkitty-proc-0\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.651805 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.658288 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.660871 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.662365 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.692827 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-config\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.692939 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-svc\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.692987 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-nb\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.693071 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-swift-storage-0\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.693165 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xts4k\" (UniqueName: \"kubernetes.io/projected/1a611a40-5e56-413b-8f3f-244522e530e7-kube-api-access-xts4k\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " 
pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.693236 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-sb\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.720522 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:35:02 crc kubenswrapper[4753]: E1205 17:35:02.720916 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.791048 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.794987 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fd5d129-b983-4138-bb14-8ea03256faf1-logs\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795024 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-swift-storage-0\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795073 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-certs\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795092 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xts4k\" (UniqueName: \"kubernetes.io/projected/1a611a40-5e56-413b-8f3f-244522e530e7-kube-api-access-xts4k\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795191 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-sb\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795213 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: 
\"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795230 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795303 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-config\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795335 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-svc\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795356 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tknx\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-kube-api-access-8tknx\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795373 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-nb\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795386 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.795419 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-scripts\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.797238 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-swift-storage-0\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.801092 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-config\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.801715 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-svc\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.803919 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-sb\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.820658 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xts4k\" (UniqueName: \"kubernetes.io/projected/1a611a40-5e56-413b-8f3f-244522e530e7-kube-api-access-xts4k\") pod \"dnsmasq-dns-86d9875b97-4k4sh\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " pod="openstack/dnsmasq-dns-86d9875b97-4k4sh"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.822407 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-c57d65f96-d2bw4"]
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.835990 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-c57d65f96-d2bw4"]
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.898408 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.898457 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.898561 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tknx\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-kube-api-access-8tknx\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.898588 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.898620 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-scripts\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.898678 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fd5d129-b983-4138-bb14-8ea03256faf1-logs\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.898878 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-certs\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.902017 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fd5d129-b983-4138-bb14-8ea03256faf1-logs\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.905504 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-scripts\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.909419 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.909852 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-certs\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.912788 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.923946 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.934015 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tknx\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-kube-api-access-8tknx\") pod \"cloudkitty-api-0\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " pod="openstack/cloudkitty-api-0"
Dec 05 17:35:02 crc kubenswrapper[4753]: I1205 17:35:02.958082 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-66c6899d8d-whfkr"
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.116821 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh"
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.157579 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665"
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.169612 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0"
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.209161 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8lw6\" (UniqueName: \"kubernetes.io/projected/484f9ea6-0202-42e7-bcf5-619d5b3176b4-kube-api-access-g8lw6\") pod \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") "
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.209606 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-swift-storage-0\") pod \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") "
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.209759 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-config\") pod \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") "
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.209831 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-sb\") pod \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") "
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.209980 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-svc\") pod \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") "
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.210062 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-nb\") pod \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\" (UID: \"484f9ea6-0202-42e7-bcf5-619d5b3176b4\") "
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.216923 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/484f9ea6-0202-42e7-bcf5-619d5b3176b4-kube-api-access-g8lw6" (OuterVolumeSpecName: "kube-api-access-g8lw6") pod "484f9ea6-0202-42e7-bcf5-619d5b3176b4" (UID: "484f9ea6-0202-42e7-bcf5-619d5b3176b4"). InnerVolumeSpecName "kube-api-access-g8lw6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.313353 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8lw6\" (UniqueName: \"kubernetes.io/projected/484f9ea6-0202-42e7-bcf5-619d5b3176b4-kube-api-access-g8lw6\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.334987 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8adad7e9-de7d-440a-9ac9-55882e2fd944","Type":"ContainerStarted","Data":"cb55397b1e8dd1a10d9b2c2e5ab04f7651d3ab40147ccc4c6593aeb1c7c90ed7"}
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.351021 4753 generic.go:334] "Generic (PLEG): container finished" podID="484f9ea6-0202-42e7-bcf5-619d5b3176b4" containerID="22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe" exitCode=0
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.351064 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" event={"ID":"484f9ea6-0202-42e7-bcf5-619d5b3176b4","Type":"ContainerDied","Data":"22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe"}
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.351092 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665" event={"ID":"484f9ea6-0202-42e7-bcf5-619d5b3176b4","Type":"ContainerDied","Data":"4b0e20daafb9374fcbdc96a640c696e84e161558a6753411ea51e75778b66c89"}
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.351112 4753 scope.go:117] "RemoveContainer" containerID="22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe"
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.351280 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-m2665"
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.365635 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "484f9ea6-0202-42e7-bcf5-619d5b3176b4" (UID: "484f9ea6-0202-42e7-bcf5-619d5b3176b4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.403737 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "484f9ea6-0202-42e7-bcf5-619d5b3176b4" (UID: "484f9ea6-0202-42e7-bcf5-619d5b3176b4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.417621 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.417655 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.421619 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.508897 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.524040 4753 scope.go:117] "RemoveContainer" containerID="b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.575761 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "484f9ea6-0202-42e7-bcf5-619d5b3176b4" (UID: "484f9ea6-0202-42e7-bcf5-619d5b3176b4"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.577426 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-config" (OuterVolumeSpecName: "config") pod "484f9ea6-0202-42e7-bcf5-619d5b3176b4" (UID: "484f9ea6-0202-42e7-bcf5-619d5b3176b4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.597640 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "484f9ea6-0202-42e7-bcf5-619d5b3176b4" (UID: "484f9ea6-0202-42e7-bcf5-619d5b3176b4"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.627638 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.627679 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.627694 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/484f9ea6-0202-42e7-bcf5-619d5b3176b4-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:03 crc kubenswrapper[4753]: E1205 17:35:03.646236 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0acfa75f_8372_4ccb_a91a_6741854cdd34.slice\": RecentStats: unable to find data in memory cache]" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.766132 4753 scope.go:117] "RemoveContainer" containerID="22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe" Dec 05 17:35:03 crc kubenswrapper[4753]: E1205 17:35:03.770596 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe\": container with ID starting with 22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe not found: ID does not exist" containerID="22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.770936 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe"} err="failed to get container status \"22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe\": rpc error: code = NotFound desc = could not find container \"22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe\": container with ID starting with 22e1671dbce4385b0ee58f854736173a188b8e035ae3f0f166b4f8027b54a4fe not found: ID does not exist" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.770968 4753 scope.go:117] "RemoveContainer" containerID="b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d" Dec 05 17:35:03 crc kubenswrapper[4753]: E1205 17:35:03.774466 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d\": container with ID starting with b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d not found: ID does not exist" containerID="b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.774501 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d"} err="failed to get container status \"b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d\": rpc error: code = NotFound desc = could not find container \"b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d\": container with ID starting 
with b1e7eb7ad19d0be0fdc05aa216a8b7434ea026cb3ea4caf4dbed1d396196819d not found: ID does not exist" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.846467 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c69accdb-191f-4ea8-905c-308492e5c663" path="/var/lib/kubelet/pods/c69accdb-191f-4ea8-905c-308492e5c663/volumes" Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.874898 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-m2665"] Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.889208 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-m2665"] Dec 05 17:35:03 crc kubenswrapper[4753]: I1205 17:35:03.910499 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-4k4sh"] Dec 05 17:35:04 crc kubenswrapper[4753]: I1205 17:35:04.080199 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:35:04 crc kubenswrapper[4753]: W1205 17:35:04.100259 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4fd5d129_b983_4138_bb14_8ea03256faf1.slice/crio-b456ac63204cbafeb7f936dedce8411b3f9dc805f9e851d35afea78a595491d8 WatchSource:0}: Error finding container b456ac63204cbafeb7f936dedce8411b3f9dc805f9e851d35afea78a595491d8: Status 404 returned error can't find the container with id b456ac63204cbafeb7f936dedce8411b3f9dc805f9e851d35afea78a595491d8 Dec 05 17:35:04 crc kubenswrapper[4753]: I1205 17:35:04.392706 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" event={"ID":"1a611a40-5e56-413b-8f3f-244522e530e7","Type":"ContainerStarted","Data":"b6fac4a38433e3c9efd7d4017bb76ea9b494e0c56f8f21b5dee3bebb0fcbc5df"} Dec 05 17:35:04 crc kubenswrapper[4753]: I1205 17:35:04.393077 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" event={"ID":"1a611a40-5e56-413b-8f3f-244522e530e7","Type":"ContainerStarted","Data":"221da1e6d94006b11246110a7ab8c353651a553c7a5407adfe086426b367f37b"} Dec 05 17:35:04 crc kubenswrapper[4753]: I1205 17:35:04.397900 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"4fd5d129-b983-4138-bb14-8ea03256faf1","Type":"ContainerStarted","Data":"b456ac63204cbafeb7f936dedce8411b3f9dc805f9e851d35afea78a595491d8"} Dec 05 17:35:04 crc kubenswrapper[4753]: I1205 17:35:04.401417 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"15b8aae9-f3b8-4227-9d94-23d10aef377a","Type":"ContainerStarted","Data":"fdc7049b451a037f55820eb0d9429a266235aab5c30488d76c0fe25abeab452e"} Dec 05 17:35:04 crc kubenswrapper[4753]: I1205 17:35:04.406864 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8adad7e9-de7d-440a-9ac9-55882e2fd944","Type":"ContainerStarted","Data":"009eb98532127a6a88ba065224d6daca6a7b1517dfa9ade8b7b64bb95f5046b7"} Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.428391 4753 generic.go:334] "Generic (PLEG): container finished" podID="1a611a40-5e56-413b-8f3f-244522e530e7" containerID="b6fac4a38433e3c9efd7d4017bb76ea9b494e0c56f8f21b5dee3bebb0fcbc5df" exitCode=0 Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.429744 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" 
event={"ID":"1a611a40-5e56-413b-8f3f-244522e530e7","Type":"ContainerDied","Data":"b6fac4a38433e3c9efd7d4017bb76ea9b494e0c56f8f21b5dee3bebb0fcbc5df"} Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.435882 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"4fd5d129-b983-4138-bb14-8ea03256faf1","Type":"ContainerStarted","Data":"441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438"} Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.435917 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"4fd5d129-b983-4138-bb14-8ea03256faf1","Type":"ContainerStarted","Data":"4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f"} Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.436661 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.447269 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"8adad7e9-de7d-440a-9ac9-55882e2fd944","Type":"ContainerStarted","Data":"99f0a2a76a46d934a238ab9f2b4b1694c36defc323595518021eb6ddf7876ce8"} Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.447625 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.478484 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.478469813 podStartE2EDuration="4.478469813s" podCreationTimestamp="2025-12-05 17:35:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:05.475367224 +0000 UTC m=+1843.978474230" watchObservedRunningTime="2025-12-05 17:35:05.478469813 +0000 UTC m=+1843.981576819" Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.501141 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=3.501123106 podStartE2EDuration="3.501123106s" podCreationTimestamp="2025-12-05 17:35:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:05.499270103 +0000 UTC m=+1844.002377119" watchObservedRunningTime="2025-12-05 17:35:05.501123106 +0000 UTC m=+1844.004230112" Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.549680 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.745216 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="484f9ea6-0202-42e7-bcf5-619d5b3176b4" path="/var/lib/kubelet/pods/484f9ea6-0202-42e7-bcf5-619d5b3176b4/volumes" Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.797545 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7f996df5b6-dlvm2" Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.849789 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7f996df5b6-dlvm2" Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.941033 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-66c6899d8d-whfkr"] Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.941543 4753 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openstack/barbican-api-66c6899d8d-whfkr" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api-log" containerID="cri-o://d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d" gracePeriod=30 Dec 05 17:35:05 crc kubenswrapper[4753]: I1205 17:35:05.941980 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-66c6899d8d-whfkr" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api" containerID="cri-o://16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c" gracePeriod=30 Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.012348 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-66c6899d8d-whfkr" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.180:9311/healthcheck\": EOF" Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.036317 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-66c6899d8d-whfkr" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.180:9311/healthcheck\": EOF" Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.275567 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.346720 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.458939 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" event={"ID":"1a611a40-5e56-413b-8f3f-244522e530e7","Type":"ContainerStarted","Data":"cb46533efe28465f7340e3af85f8cdbc3da3c4e966742908a6f98e20a671ba9d"} Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.459479 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.461475 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"15b8aae9-f3b8-4227-9d94-23d10aef377a","Type":"ContainerStarted","Data":"f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b"} Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.473985 4753 generic.go:334] "Generic (PLEG): container finished" podID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerID="d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d" exitCode=143 Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.483506 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66c6899d8d-whfkr" event={"ID":"6686b0a0-8673-4e80-9763-64bc2d17834b","Type":"ContainerDied","Data":"d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d"} Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.484090 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerName="cinder-scheduler" containerID="cri-o://de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf" gracePeriod=30 Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.484386 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerName="probe" 
containerID="cri-o://d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94" gracePeriod=30 Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.518431 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" podStartSLOduration=4.518407456 podStartE2EDuration="4.518407456s" podCreationTimestamp="2025-12-05 17:35:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:06.491979276 +0000 UTC m=+1844.995086282" watchObservedRunningTime="2025-12-05 17:35:06.518407456 +0000 UTC m=+1845.021514462" Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.524214 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.371638991 podStartE2EDuration="4.524199s" podCreationTimestamp="2025-12-05 17:35:02 +0000 UTC" firstStartedPulling="2025-12-05 17:35:03.556709656 +0000 UTC m=+1842.059816662" lastFinishedPulling="2025-12-05 17:35:05.709269665 +0000 UTC m=+1844.212376671" observedRunningTime="2025-12-05 17:35:06.51892539 +0000 UTC m=+1845.022032396" watchObservedRunningTime="2025-12-05 17:35:06.524199 +0000 UTC m=+1845.027306006" Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.577789 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:35:06 crc kubenswrapper[4753]: I1205 17:35:06.998137 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:35:07 crc kubenswrapper[4753]: I1205 17:35:07.001587 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-56466dc556-cvwd4" Dec 05 17:35:07 crc kubenswrapper[4753]: I1205 17:35:07.488024 4753 generic.go:334] "Generic (PLEG): container finished" podID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerID="d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94" exitCode=0 Dec 05 17:35:07 crc kubenswrapper[4753]: I1205 17:35:07.488190 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a","Type":"ContainerDied","Data":"d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94"} Dec 05 17:35:07 crc kubenswrapper[4753]: I1205 17:35:07.488374 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerName="cloudkitty-api-log" containerID="cri-o://4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f" gracePeriod=30 Dec 05 17:35:07 crc kubenswrapper[4753]: I1205 17:35:07.488478 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerName="cloudkitty-api" containerID="cri-o://441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438" gracePeriod=30 Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.159667 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.186365 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-scripts\") pod \"4fd5d129-b983-4138-bb14-8ea03256faf1\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.186442 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tknx\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-kube-api-access-8tknx\") pod \"4fd5d129-b983-4138-bb14-8ea03256faf1\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.186518 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-combined-ca-bundle\") pod \"4fd5d129-b983-4138-bb14-8ea03256faf1\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.186653 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data\") pod \"4fd5d129-b983-4138-bb14-8ea03256faf1\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.186683 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data-custom\") pod \"4fd5d129-b983-4138-bb14-8ea03256faf1\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.186705 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fd5d129-b983-4138-bb14-8ea03256faf1-logs\") pod \"4fd5d129-b983-4138-bb14-8ea03256faf1\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.186734 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-certs\") pod \"4fd5d129-b983-4138-bb14-8ea03256faf1\" (UID: \"4fd5d129-b983-4138-bb14-8ea03256faf1\") " Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.188661 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4fd5d129-b983-4138-bb14-8ea03256faf1-logs" (OuterVolumeSpecName: "logs") pod "4fd5d129-b983-4138-bb14-8ea03256faf1" (UID: "4fd5d129-b983-4138-bb14-8ea03256faf1"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.196536 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-kube-api-access-8tknx" (OuterVolumeSpecName: "kube-api-access-8tknx") pod "4fd5d129-b983-4138-bb14-8ea03256faf1" (UID: "4fd5d129-b983-4138-bb14-8ea03256faf1"). InnerVolumeSpecName "kube-api-access-8tknx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.198626 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-scripts" (OuterVolumeSpecName: "scripts") pod "4fd5d129-b983-4138-bb14-8ea03256faf1" (UID: "4fd5d129-b983-4138-bb14-8ea03256faf1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.212498 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4fd5d129-b983-4138-bb14-8ea03256faf1" (UID: "4fd5d129-b983-4138-bb14-8ea03256faf1"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.223229 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-certs" (OuterVolumeSpecName: "certs") pod "4fd5d129-b983-4138-bb14-8ea03256faf1" (UID: "4fd5d129-b983-4138-bb14-8ea03256faf1"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.288709 4753 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.288740 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4fd5d129-b983-4138-bb14-8ea03256faf1-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.288748 4753 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.288756 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.288764 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tknx\" (UniqueName: \"kubernetes.io/projected/4fd5d129-b983-4138-bb14-8ea03256faf1-kube-api-access-8tknx\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.295272 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data" (OuterVolumeSpecName: "config-data") pod "4fd5d129-b983-4138-bb14-8ea03256faf1" (UID: "4fd5d129-b983-4138-bb14-8ea03256faf1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.302441 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4fd5d129-b983-4138-bb14-8ea03256faf1" (UID: "4fd5d129-b983-4138-bb14-8ea03256faf1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.390649 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.390693 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4fd5d129-b983-4138-bb14-8ea03256faf1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.498991 4753 generic.go:334] "Generic (PLEG): container finished" podID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerID="441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438" exitCode=0 Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.499316 4753 generic.go:334] "Generic (PLEG): container finished" podID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerID="4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f" exitCode=143 Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.499059 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.499079 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"4fd5d129-b983-4138-bb14-8ea03256faf1","Type":"ContainerDied","Data":"441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438"} Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.499444 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"4fd5d129-b983-4138-bb14-8ea03256faf1","Type":"ContainerDied","Data":"4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f"} Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.499459 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"4fd5d129-b983-4138-bb14-8ea03256faf1","Type":"ContainerDied","Data":"b456ac63204cbafeb7f936dedce8411b3f9dc805f9e851d35afea78a595491d8"} Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.499476 4753 scope.go:117] "RemoveContainer" containerID="441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.500354 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="15b8aae9-f3b8-4227-9d94-23d10aef377a" containerName="cloudkitty-proc" containerID="cri-o://f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b" gracePeriod=30 Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.550939 4753 scope.go:117] "RemoveContainer" containerID="4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.551083 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.568940 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.583296 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:35:08 crc kubenswrapper[4753]: E1205 17:35:08.583806 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerName="cloudkitty-api" Dec 05 
17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.583822 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerName="cloudkitty-api" Dec 05 17:35:08 crc kubenswrapper[4753]: E1205 17:35:08.583844 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="484f9ea6-0202-42e7-bcf5-619d5b3176b4" containerName="dnsmasq-dns" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.583851 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="484f9ea6-0202-42e7-bcf5-619d5b3176b4" containerName="dnsmasq-dns" Dec 05 17:35:08 crc kubenswrapper[4753]: E1205 17:35:08.583867 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerName="cloudkitty-api-log" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.583873 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerName="cloudkitty-api-log" Dec 05 17:35:08 crc kubenswrapper[4753]: E1205 17:35:08.583897 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="484f9ea6-0202-42e7-bcf5-619d5b3176b4" containerName="init" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.583902 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="484f9ea6-0202-42e7-bcf5-619d5b3176b4" containerName="init" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.584094 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerName="cloudkitty-api-log" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.584113 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="4fd5d129-b983-4138-bb14-8ea03256faf1" containerName="cloudkitty-api" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.584128 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="484f9ea6-0202-42e7-bcf5-619d5b3176b4" containerName="dnsmasq-dns" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.585306 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.597797 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.598449 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.598581 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.599012 4753 scope.go:117] "RemoveContainer" containerID="441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438" Dec 05 17:35:08 crc kubenswrapper[4753]: E1205 17:35:08.602270 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438\": container with ID starting with 441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438 not found: ID does not exist" containerID="441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.602307 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438"} err="failed to get container status \"441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438\": rpc error: code = NotFound desc = could not find container \"441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438\": container with ID starting with 441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438 not found: ID does not exist" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.602327 4753 scope.go:117] "RemoveContainer" containerID="4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f" Dec 05 17:35:08 crc kubenswrapper[4753]: E1205 17:35:08.619856 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f\": container with ID starting with 4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f not found: ID does not exist" containerID="4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.619899 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f"} err="failed to get container status \"4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f\": rpc error: code = NotFound desc = could not find container \"4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f\": container with ID starting with 4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f not found: ID does not exist" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.619925 4753 scope.go:117] "RemoveContainer" containerID="441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.621336 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438"} err="failed to get container status 
\"441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438\": rpc error: code = NotFound desc = could not find container \"441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438\": container with ID starting with 441c72cee2c5e72d4e53ea0a81e15da8574cf9c270429fe0185a2deb0aa7d438 not found: ID does not exist" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.621361 4753 scope.go:117] "RemoveContainer" containerID="4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.622602 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f"} err="failed to get container status \"4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f\": rpc error: code = NotFound desc = could not find container \"4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f\": container with ID starting with 4d7e08e6a56c80be78a2d46dfa801d7d7e10e6d9458ac719f89618c57073a60f not found: ID does not exist" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.623312 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.699335 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.699429 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-logs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.699467 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.699527 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.699581 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.699629 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbm96\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-kube-api-access-lbm96\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 
05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.699649 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-scripts\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.699680 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-certs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.699695 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.800911 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.801004 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbm96\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-kube-api-access-lbm96\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.801037 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-scripts\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.801088 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-certs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.801105 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.801123 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.801281 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-logs\") pod \"cloudkitty-api-0\" 
(UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.801325 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.801354 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.802602 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-logs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.809592 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-certs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.809668 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.810528 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.811920 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.818761 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.819326 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-scripts\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.822730 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data-custom\") pod \"cloudkitty-api-0\" (UID: 
\"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.835437 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbm96\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-kube-api-access-lbm96\") pod \"cloudkitty-api-0\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " pod="openstack/cloudkitty-api-0" Dec 05 17:35:08 crc kubenswrapper[4753]: I1205 17:35:08.924185 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 17:35:09 crc kubenswrapper[4753]: I1205 17:35:09.478862 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:35:09 crc kubenswrapper[4753]: I1205 17:35:09.524381 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"cb0f0974-bb6e-4777-a4b0-bee542faf6b5","Type":"ContainerStarted","Data":"ae9458ae39c2edb69902ab07769b3fa45d8de44ef451ad335063ffaeddba4474"} Dec 05 17:35:09 crc kubenswrapper[4753]: I1205 17:35:09.765227 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4fd5d129-b983-4138-bb14-8ea03256faf1" path="/var/lib/kubelet/pods/4fd5d129-b983-4138-bb14-8ea03256faf1/volumes" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.248921 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.366737 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-combined-ca-bundle\") pod \"6686b0a0-8673-4e80-9763-64bc2d17834b\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.366881 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5tjwg\" (UniqueName: \"kubernetes.io/projected/6686b0a0-8673-4e80-9763-64bc2d17834b-kube-api-access-5tjwg\") pod \"6686b0a0-8673-4e80-9763-64bc2d17834b\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.366972 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6686b0a0-8673-4e80-9763-64bc2d17834b-logs\") pod \"6686b0a0-8673-4e80-9763-64bc2d17834b\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.366995 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data-custom\") pod \"6686b0a0-8673-4e80-9763-64bc2d17834b\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.367113 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data\") pod \"6686b0a0-8673-4e80-9763-64bc2d17834b\" (UID: \"6686b0a0-8673-4e80-9763-64bc2d17834b\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.369077 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6686b0a0-8673-4e80-9763-64bc2d17834b-logs" (OuterVolumeSpecName: "logs") pod 
"6686b0a0-8673-4e80-9763-64bc2d17834b" (UID: "6686b0a0-8673-4e80-9763-64bc2d17834b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.384673 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6686b0a0-8673-4e80-9763-64bc2d17834b-kube-api-access-5tjwg" (OuterVolumeSpecName: "kube-api-access-5tjwg") pod "6686b0a0-8673-4e80-9763-64bc2d17834b" (UID: "6686b0a0-8673-4e80-9763-64bc2d17834b"). InnerVolumeSpecName "kube-api-access-5tjwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.385077 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6686b0a0-8673-4e80-9763-64bc2d17834b" (UID: "6686b0a0-8673-4e80-9763-64bc2d17834b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.428598 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data" (OuterVolumeSpecName: "config-data") pod "6686b0a0-8673-4e80-9763-64bc2d17834b" (UID: "6686b0a0-8673-4e80-9763-64bc2d17834b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.444530 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6686b0a0-8673-4e80-9763-64bc2d17834b" (UID: "6686b0a0-8673-4e80-9763-64bc2d17834b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.471798 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.471832 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.471862 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5tjwg\" (UniqueName: \"kubernetes.io/projected/6686b0a0-8673-4e80-9763-64bc2d17834b-kube-api-access-5tjwg\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.471872 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6686b0a0-8673-4e80-9763-64bc2d17834b-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.471881 4753 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6686b0a0-8673-4e80-9763-64bc2d17834b-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.485781 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.535405 4753 generic.go:334] "Generic (PLEG): container finished" podID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerID="16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c" exitCode=0 Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.535463 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66c6899d8d-whfkr" event={"ID":"6686b0a0-8673-4e80-9763-64bc2d17834b","Type":"ContainerDied","Data":"16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c"} Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.535494 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-66c6899d8d-whfkr" event={"ID":"6686b0a0-8673-4e80-9763-64bc2d17834b","Type":"ContainerDied","Data":"8399da123101c0dc947d00005889895a4ab4ddd66c0d13f7afbcf9c65362849c"} Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.535514 4753 scope.go:117] "RemoveContainer" containerID="16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.535660 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-66c6899d8d-whfkr" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.557014 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"cb0f0974-bb6e-4777-a4b0-bee542faf6b5","Type":"ContainerStarted","Data":"ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3"} Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.557068 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"cb0f0974-bb6e-4777-a4b0-bee542faf6b5","Type":"ContainerStarted","Data":"10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37"} Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.558249 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.563508 4753 generic.go:334] "Generic (PLEG): container finished" podID="15b8aae9-f3b8-4227-9d94-23d10aef377a" containerID="f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b" exitCode=0 Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.563558 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"15b8aae9-f3b8-4227-9d94-23d10aef377a","Type":"ContainerDied","Data":"f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b"} Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.563587 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"15b8aae9-f3b8-4227-9d94-23d10aef377a","Type":"ContainerDied","Data":"fdc7049b451a037f55820eb0d9429a266235aab5c30488d76c0fe25abeab452e"} Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.563643 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.572609 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-llhlx\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-kube-api-access-llhlx\") pod \"15b8aae9-f3b8-4227-9d94-23d10aef377a\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.572707 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data\") pod \"15b8aae9-f3b8-4227-9d94-23d10aef377a\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.572765 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-combined-ca-bundle\") pod \"15b8aae9-f3b8-4227-9d94-23d10aef377a\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.572800 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-scripts\") pod \"15b8aae9-f3b8-4227-9d94-23d10aef377a\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.572866 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-certs\") pod \"15b8aae9-f3b8-4227-9d94-23d10aef377a\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.572930 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data-custom\") pod \"15b8aae9-f3b8-4227-9d94-23d10aef377a\" (UID: \"15b8aae9-f3b8-4227-9d94-23d10aef377a\") " Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.580281 4753 scope.go:117] "RemoveContainer" containerID="d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.589277 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-66c6899d8d-whfkr"] Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.597402 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-scripts" (OuterVolumeSpecName: "scripts") pod "15b8aae9-f3b8-4227-9d94-23d10aef377a" (UID: "15b8aae9-f3b8-4227-9d94-23d10aef377a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.601367 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "15b8aae9-f3b8-4227-9d94-23d10aef377a" (UID: "15b8aae9-f3b8-4227-9d94-23d10aef377a"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.605428 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-certs" (OuterVolumeSpecName: "certs") pod "15b8aae9-f3b8-4227-9d94-23d10aef377a" (UID: "15b8aae9-f3b8-4227-9d94-23d10aef377a"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.606017 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-kube-api-access-llhlx" (OuterVolumeSpecName: "kube-api-access-llhlx") pod "15b8aae9-f3b8-4227-9d94-23d10aef377a" (UID: "15b8aae9-f3b8-4227-9d94-23d10aef377a"). InnerVolumeSpecName "kube-api-access-llhlx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.621631 4753 scope.go:117] "RemoveContainer" containerID="16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c" Dec 05 17:35:10 crc kubenswrapper[4753]: E1205 17:35:10.626267 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c\": container with ID starting with 16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c not found: ID does not exist" containerID="16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.626474 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c"} err="failed to get container status \"16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c\": rpc error: code = NotFound desc = could not find container \"16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c\": container with ID starting with 16a611b787135d2752ac8dd584d360f5edaa9fc39463d3e19413fe8aff788e4c not found: ID does not exist" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.626507 4753 scope.go:117] "RemoveContainer" containerID="d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.627436 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-66c6899d8d-whfkr"] Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.636562 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.636541216 podStartE2EDuration="2.636541216s" podCreationTimestamp="2025-12-05 17:35:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:10.6010963 +0000 UTC m=+1849.104203306" watchObservedRunningTime="2025-12-05 17:35:10.636541216 +0000 UTC m=+1849.139648222" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.639406 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "15b8aae9-f3b8-4227-9d94-23d10aef377a" (UID: "15b8aae9-f3b8-4227-9d94-23d10aef377a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: E1205 17:35:10.642454 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d\": container with ID starting with d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d not found: ID does not exist" containerID="d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.642502 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d"} err="failed to get container status \"d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d\": rpc error: code = NotFound desc = could not find container \"d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d\": container with ID starting with d0c9e642b21e6139c5a66b59c596c1f7aa3e4672d9fe42c96f474da7a4e9e87d not found: ID does not exist" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.642531 4753 scope.go:117] "RemoveContainer" containerID="f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.675172 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.675210 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.675220 4753 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.675229 4753 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.675239 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-llhlx\" (UniqueName: \"kubernetes.io/projected/15b8aae9-f3b8-4227-9d94-23d10aef377a-kube-api-access-llhlx\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.678342 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data" (OuterVolumeSpecName: "config-data") pod "15b8aae9-f3b8-4227-9d94-23d10aef377a" (UID: "15b8aae9-f3b8-4227-9d94-23d10aef377a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.716324 4753 scope.go:117] "RemoveContainer" containerID="f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b" Dec 05 17:35:10 crc kubenswrapper[4753]: E1205 17:35:10.721308 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b\": container with ID starting with f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b not found: ID does not exist" containerID="f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.721353 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b"} err="failed to get container status \"f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b\": rpc error: code = NotFound desc = could not find container \"f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b\": container with ID starting with f70b9579e57dcb8bcaa9103e83217045d19e37c2221b5c2e76b83fbee42b1a6b not found: ID does not exist" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.777219 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15b8aae9-f3b8-4227-9d94-23d10aef377a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.903989 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.913870 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.930929 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:35:10 crc kubenswrapper[4753]: E1205 17:35:10.931423 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15b8aae9-f3b8-4227-9d94-23d10aef377a" containerName="cloudkitty-proc" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.931440 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="15b8aae9-f3b8-4227-9d94-23d10aef377a" containerName="cloudkitty-proc" Dec 05 17:35:10 crc kubenswrapper[4753]: E1205 17:35:10.931469 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.931477 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api" Dec 05 17:35:10 crc kubenswrapper[4753]: E1205 17:35:10.931490 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api-log" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.931496 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api-log" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.931671 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="15b8aae9-f3b8-4227-9d94-23d10aef377a" containerName="cloudkitty-proc" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.931691 4753 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api-log" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.931708 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" containerName="barbican-api" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.932467 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.935127 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 05 17:35:10 crc kubenswrapper[4753]: I1205 17:35:10.948255 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.082396 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.082467 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.082510 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-certs\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.082550 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-scripts\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.082631 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dd97\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-kube-api-access-6dd97\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.082910 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.185398 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.185532 4753 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.185594 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.185646 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-certs\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.185680 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-scripts\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.185704 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dd97\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-kube-api-access-6dd97\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.193325 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.195016 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.195497 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-certs\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.200538 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-scripts\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.200897 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.210592 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6dd97\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-kube-api-access-6dd97\") pod \"cloudkitty-proc-0\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.232350 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-578b67ddb8-fsb8m" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.294755 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.504522 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.577489 4753 generic.go:334] "Generic (PLEG): container finished" podID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerID="de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf" exitCode=0 Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.577565 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a","Type":"ContainerDied","Data":"de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf"} Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.577582 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.577597 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a","Type":"ContainerDied","Data":"c27db363a9a367b5cbec0d7b76a4d7098639d316ef155bfce61ba8eadd04e9d5"} Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.577614 4753 scope.go:117] "RemoveContainer" containerID="d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.596357 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data-custom\") pod \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.596415 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-combined-ca-bundle\") pod \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.596438 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-etc-machine-id\") pod \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.596458 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data\") pod \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.596601 4753 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-94jzd\" (UniqueName: \"kubernetes.io/projected/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-kube-api-access-94jzd\") pod \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.597487 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-scripts\") pod \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\" (UID: \"a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a\") " Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.598224 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" (UID: "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.609447 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" (UID: "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.610737 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-scripts" (OuterVolumeSpecName: "scripts") pod "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" (UID: "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.610874 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-kube-api-access-94jzd" (OuterVolumeSpecName: "kube-api-access-94jzd") pod "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" (UID: "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a"). InnerVolumeSpecName "kube-api-access-94jzd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.651799 4753 scope.go:117] "RemoveContainer" containerID="de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.699888 4753 scope.go:117] "RemoveContainer" containerID="d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94" Dec 05 17:35:11 crc kubenswrapper[4753]: E1205 17:35:11.700863 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94\": container with ID starting with d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94 not found: ID does not exist" containerID="d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.700901 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94"} err="failed to get container status \"d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94\": rpc error: code = NotFound desc = could not find container \"d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94\": container with ID starting with d69e947bd25344ecf8280fbfa546202f34127171ef1438b3beab67053788ba94 not found: ID does not exist" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.700931 4753 scope.go:117] "RemoveContainer" containerID="de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.701725 4753 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.701752 4753 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.701761 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-94jzd\" (UniqueName: \"kubernetes.io/projected/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-kube-api-access-94jzd\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.701770 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:11 crc kubenswrapper[4753]: E1205 17:35:11.701817 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf\": container with ID starting with de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf not found: ID does not exist" containerID="de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf" Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.701834 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf"} err="failed to get container status \"de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf\": rpc error: code = NotFound 
desc = could not find container \"de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf\": container with ID starting with de921abec4ad1a36d869bb9bfd2da82c694d12d0f9e9021565874ace05d6ffcf not found: ID does not exist"
Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.711458 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" (UID: "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.740833 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15b8aae9-f3b8-4227-9d94-23d10aef377a" path="/var/lib/kubelet/pods/15b8aae9-f3b8-4227-9d94-23d10aef377a/volumes"
Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.741398 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6686b0a0-8673-4e80-9763-64bc2d17834b" path="/var/lib/kubelet/pods/6686b0a0-8673-4e80-9763-64bc2d17834b/volumes"
Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.749212 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data" (OuterVolumeSpecName: "config-data") pod "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" (UID: "a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.803327 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.803357 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.932227 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"]
Dec 05 17:35:11 crc kubenswrapper[4753]: I1205 17:35:11.981202 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.025916 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.042911 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 17:35:12 crc kubenswrapper[4753]: E1205 17:35:12.044416 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerName="cinder-scheduler"
Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.044439 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerName="cinder-scheduler"
Dec 05 17:35:12 crc kubenswrapper[4753]: E1205 17:35:12.044488 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerName="probe"
Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.044496 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerName="probe"
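The SyncLoop DELETE/REMOVE/ADD triple above shows cinder-scheduler-0 being replaced under a new pod UID (a73e4ddc… gives way to 802deda2… below), at which point cpu_manager.go:410 and state_mem.go:107 purge the CPU-set assignments still recorded for the old UID before the new pod is admitted; memory_manager.go:354 does the same a moment later. A small sketch of that stale-state sweep over a map keyed by pod UID and container name; the types and data are hypothetical, shown only to illustrate the idea:

    package main

    import "fmt"

    // key identifies a per-container resource assignment, as the kubelet's
    // cpu/memory managers do by pod UID plus container name.
    type key struct{ podUID, container string }

    // removeStaleState drops assignments for pods the kubelet no longer
    // considers active. A sketch of the idea behind RemoveStaleState, not
    // the real implementation.
    func removeStaleState(assignments map[key]string, active map[string]bool) {
    	for k := range assignments {
    		if !active[k.podUID] {
    			fmt.Printf("RemoveStaleState: removing container %q (pod %s)\n", k.container, k.podUID)
    			delete(assignments, k) // deleting while ranging is safe in Go
    		}
    	}
    }

    func main() {
    	assignments := map[key]string{
    		{"a73e4ddc", "cinder-scheduler"}: "cpus 0-1",
    		{"a73e4ddc", "probe"}:            "cpus 2",
    		{"802deda2", "cinder-scheduler"}: "cpus 3",
    	}
    	// Only the replacement pod's UID is still active.
    	removeStaleState(assignments, map[string]bool{"802deda2": true})
    	fmt.Println("remaining assignments:", len(assignments)) // 1
    }

Dec 05 17:35:12 crc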
kubenswrapper[4753]: I1205 17:35:12.047688 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerName="probe" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.047727 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" containerName="cinder-scheduler" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.049124 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.051448 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.068792 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.172276 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g27lw"] Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.174709 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.183954 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g27lw"] Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.214182 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-scripts\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.220648 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.220923 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.221180 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-config-data\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.221278 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkgn5\" (UniqueName: \"kubernetes.io/projected/802deda2-7602-46a4-b4d0-25cd167bbdf2-kube-api-access-fkgn5\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.221416 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/802deda2-7602-46a4-b4d0-25cd167bbdf2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.323880 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-scripts\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.323958 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.324033 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-catalog-content\") pod \"redhat-marketplace-g27lw\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.324061 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-utilities\") pod \"redhat-marketplace-g27lw\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.324105 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.324213 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-config-data\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.324243 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkgn5\" (UniqueName: \"kubernetes.io/projected/802deda2-7602-46a4-b4d0-25cd167bbdf2-kube-api-access-fkgn5\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.324271 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dznp\" (UniqueName: \"kubernetes.io/projected/541c84bb-d947-4c64-8930-787574d01f73-kube-api-access-2dznp\") pod \"redhat-marketplace-g27lw\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.324324 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/802deda2-7602-46a4-b4d0-25cd167bbdf2-etc-machine-id\") pod 
\"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.324487 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/802deda2-7602-46a4-b4d0-25cd167bbdf2-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.330757 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.343752 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-scripts\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.343950 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.346585 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/802deda2-7602-46a4-b4d0-25cd167bbdf2-config-data\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.354728 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkgn5\" (UniqueName: \"kubernetes.io/projected/802deda2-7602-46a4-b4d0-25cd167bbdf2-kube-api-access-fkgn5\") pod \"cinder-scheduler-0\" (UID: \"802deda2-7602-46a4-b4d0-25cd167bbdf2\") " pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.373331 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.425857 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-catalog-content\") pod \"redhat-marketplace-g27lw\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.425897 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-utilities\") pod \"redhat-marketplace-g27lw\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.425993 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dznp\" (UniqueName: \"kubernetes.io/projected/541c84bb-d947-4c64-8930-787574d01f73-kube-api-access-2dznp\") pod \"redhat-marketplace-g27lw\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.426448 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-catalog-content\") pod \"redhat-marketplace-g27lw\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.426483 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-utilities\") pod \"redhat-marketplace-g27lw\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.442986 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dznp\" (UniqueName: \"kubernetes.io/projected/541c84bb-d947-4c64-8930-787574d01f73-kube-api-access-2dznp\") pod \"redhat-marketplace-g27lw\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.507250 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.619437 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"e3e74a0c-459c-4ecd-be3b-9eafb9656068","Type":"ContainerStarted","Data":"04ab774fa9c185ad246330c2625e966fb597984af3ddf6a43f3b2cf495bc9d73"} Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.619493 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"e3e74a0c-459c-4ecd-be3b-9eafb9656068","Type":"ContainerStarted","Data":"95625e0027f053715e98bf27aaf2c6c9f783f2ffd15a6bde5b0bdcfe7b3d3a40"} Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.658790 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.658768585 podStartE2EDuration="2.658768585s" podCreationTimestamp="2025-12-05 17:35:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:12.648536125 +0000 UTC m=+1851.151643131" watchObservedRunningTime="2025-12-05 17:35:12.658768585 +0000 UTC m=+1851.161875591" Dec 05 17:35:12 crc kubenswrapper[4753]: I1205 17:35:12.889172 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:35:12 crc kubenswrapper[4753]: W1205 17:35:12.894587 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod802deda2_7602_46a4_b4d0_25cd167bbdf2.slice/crio-cb0fdd0997c065f76612848325cc17fc817bba8e6b2740eb7ffcdba79e3d6fab WatchSource:0}: Error finding container cb0fdd0997c065f76612848325cc17fc817bba8e6b2740eb7ffcdba79e3d6fab: Status 404 returned error can't find the container with id cb0fdd0997c065f76612848325cc17fc817bba8e6b2740eb7ffcdba79e3d6fab Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.123278 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.137676 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g27lw"] Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.253856 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-stl2q"] Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.254753 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" podUID="7fa187c8-5ebc-4112-8551-0c551b537f32" containerName="dnsmasq-dns" containerID="cri-o://2ff602be5fec339579546a2175b8050cc98dfdb05b8bd83a288cd0835e0ade94" gracePeriod=10 Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.683884 4753 generic.go:334] "Generic (PLEG): container finished" podID="541c84bb-d947-4c64-8930-787574d01f73" containerID="7b144a374b4303f41ee589e532e86329d13a300a4668e1b67fb2711e6cafa06a" exitCode=0 Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.684220 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g27lw" event={"ID":"541c84bb-d947-4c64-8930-787574d01f73","Type":"ContainerDied","Data":"7b144a374b4303f41ee589e532e86329d13a300a4668e1b67fb2711e6cafa06a"} Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.684248 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-g27lw" event={"ID":"541c84bb-d947-4c64-8930-787574d01f73","Type":"ContainerStarted","Data":"6a76e139669850f8299a293459d9c3365a672c57a6f695c8d883f8d20d112a5e"} Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.696497 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"802deda2-7602-46a4-b4d0-25cd167bbdf2","Type":"ContainerStarted","Data":"cb0fdd0997c065f76612848325cc17fc817bba8e6b2740eb7ffcdba79e3d6fab"} Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.759600 4753 generic.go:334] "Generic (PLEG): container finished" podID="7fa187c8-5ebc-4112-8551-0c551b537f32" containerID="2ff602be5fec339579546a2175b8050cc98dfdb05b8bd83a288cd0835e0ade94" exitCode=0 Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.771867 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a" path="/var/lib/kubelet/pods/a73e4ddc-fbbe-4b4c-8b71-ba91b7ebb48a/volumes" Dec 05 17:35:13 crc kubenswrapper[4753]: I1205 17:35:13.774803 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" event={"ID":"7fa187c8-5ebc-4112-8551-0c551b537f32","Type":"ContainerDied","Data":"2ff602be5fec339579546a2175b8050cc98dfdb05b8bd83a288cd0835e0ade94"} Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.045393 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.173820 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-swift-storage-0\") pod \"7fa187c8-5ebc-4112-8551-0c551b537f32\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.173895 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-svc\") pod \"7fa187c8-5ebc-4112-8551-0c551b537f32\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.173972 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-nb\") pod \"7fa187c8-5ebc-4112-8551-0c551b537f32\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.174002 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v4r57\" (UniqueName: \"kubernetes.io/projected/7fa187c8-5ebc-4112-8551-0c551b537f32-kube-api-access-v4r57\") pod \"7fa187c8-5ebc-4112-8551-0c551b537f32\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.174058 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-sb\") pod \"7fa187c8-5ebc-4112-8551-0c551b537f32\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.174215 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-config\") pod 
\"7fa187c8-5ebc-4112-8551-0c551b537f32\" (UID: \"7fa187c8-5ebc-4112-8551-0c551b537f32\") " Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.206359 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fa187c8-5ebc-4112-8551-0c551b537f32-kube-api-access-v4r57" (OuterVolumeSpecName: "kube-api-access-v4r57") pod "7fa187c8-5ebc-4112-8551-0c551b537f32" (UID: "7fa187c8-5ebc-4112-8551-0c551b537f32"). InnerVolumeSpecName "kube-api-access-v4r57". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.292591 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v4r57\" (UniqueName: \"kubernetes.io/projected/7fa187c8-5ebc-4112-8551-0c551b537f32-kube-api-access-v4r57\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.305508 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7fa187c8-5ebc-4112-8551-0c551b537f32" (UID: "7fa187c8-5ebc-4112-8551-0c551b537f32"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.358570 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-config" (OuterVolumeSpecName: "config") pod "7fa187c8-5ebc-4112-8551-0c551b537f32" (UID: "7fa187c8-5ebc-4112-8551-0c551b537f32"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.363405 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7fa187c8-5ebc-4112-8551-0c551b537f32" (UID: "7fa187c8-5ebc-4112-8551-0c551b537f32"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.383477 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.387730 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7fa187c8-5ebc-4112-8551-0c551b537f32" (UID: "7fa187c8-5ebc-4112-8551-0c551b537f32"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.396023 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.396076 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.396093 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.396106 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.412785 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7fa187c8-5ebc-4112-8551-0c551b537f32" (UID: "7fa187c8-5ebc-4112-8551-0c551b537f32"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:14 crc kubenswrapper[4753]: E1205 17:35:14.445257 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0acfa75f_8372_4ccb_a91a_6741854cdd34.slice\": RecentStats: unable to find data in memory cache]" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.510884 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7fa187c8-5ebc-4112-8551-0c551b537f32-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.845704 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"802deda2-7602-46a4-b4d0-25cd167bbdf2","Type":"ContainerStarted","Data":"93c4c5bde3de5b16fdc4818dca44ada8730d4cdb5e4b0694ff8521e6c487b25e"} Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.863920 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" event={"ID":"7fa187c8-5ebc-4112-8551-0c551b537f32","Type":"ContainerDied","Data":"434a79eef47c833a3a354e08ba2c127bb4e15e66591a9431f2ca4e00c573db17"} Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.863984 4753 scope.go:117] "RemoveContainer" containerID="2ff602be5fec339579546a2175b8050cc98dfdb05b8bd83a288cd0835e0ade94" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.864258 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc5c4795-stl2q" Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.906597 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-stl2q"] Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.914063 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc5c4795-stl2q"] Dec 05 17:35:14 crc kubenswrapper[4753]: I1205 17:35:14.932112 4753 scope.go:117] "RemoveContainer" containerID="ec73a62712b61d6145fd1b2dae64e66805d2154337e603668aea42d454749f99" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.421979 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 17:35:15 crc kubenswrapper[4753]: E1205 17:35:15.422795 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fa187c8-5ebc-4112-8551-0c551b537f32" containerName="dnsmasq-dns" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.422812 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fa187c8-5ebc-4112-8551-0c551b537f32" containerName="dnsmasq-dns" Dec 05 17:35:15 crc kubenswrapper[4753]: E1205 17:35:15.422843 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fa187c8-5ebc-4112-8551-0c551b537f32" containerName="init" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.422848 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fa187c8-5ebc-4112-8551-0c551b537f32" containerName="init" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.423042 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fa187c8-5ebc-4112-8551-0c551b537f32" containerName="dnsmasq-dns" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.423947 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.425665 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.426079 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-cnmk5" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.426268 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.438816 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.536597 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.536731 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c47c2\" (UniqueName: \"kubernetes.io/projected/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-kube-api-access-c47c2\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.536762 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-openstack-config-secret\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.536850 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-openstack-config\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.638728 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-openstack-config\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.638811 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.638891 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c47c2\" (UniqueName: \"kubernetes.io/projected/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-kube-api-access-c47c2\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.638914 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-openstack-config-secret\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.640474 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-openstack-config\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.649126 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-openstack-config-secret\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.651111 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.656316 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c47c2\" (UniqueName: \"kubernetes.io/projected/1917fbcd-3d32-4ceb-aeab-1119aa3d4771-kube-api-access-c47c2\") pod \"openstackclient\" (UID: \"1917fbcd-3d32-4ceb-aeab-1119aa3d4771\") " pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.730781 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fa187c8-5ebc-4112-8551-0c551b537f32" path="/var/lib/kubelet/pods/7fa187c8-5ebc-4112-8551-0c551b537f32/volumes" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.747862 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.930489 4753 generic.go:334] "Generic (PLEG): container finished" podID="541c84bb-d947-4c64-8930-787574d01f73" containerID="e49eeb07318611378a55ed83e7c930531712a315e930c4516870863b2a6e67af" exitCode=0 Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.930936 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g27lw" event={"ID":"541c84bb-d947-4c64-8930-787574d01f73","Type":"ContainerDied","Data":"e49eeb07318611378a55ed83e7c930531712a315e930c4516870863b2a6e67af"} Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.960730 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"802deda2-7602-46a4-b4d0-25cd167bbdf2","Type":"ContainerStarted","Data":"88e4bc01efa44f728bdb3dce29bb18d945126c87f3619d705b87f275b1d4b2e4"} Dec 05 17:35:15 crc kubenswrapper[4753]: I1205 17:35:15.988669 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.9886491379999995 podStartE2EDuration="4.988649138s" podCreationTimestamp="2025-12-05 17:35:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:15.982455262 +0000 UTC m=+1854.485562268" watchObservedRunningTime="2025-12-05 17:35:15.988649138 +0000 UTC m=+1854.491756144" Dec 05 17:35:16 crc kubenswrapper[4753]: I1205 17:35:16.324777 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 17:35:16 crc kubenswrapper[4753]: I1205 17:35:16.976487 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g27lw" event={"ID":"541c84bb-d947-4c64-8930-787574d01f73","Type":"ContainerStarted","Data":"d3aab742c8b2701fffaa67498dac8f2bb70b4ede0e6a13fcdfd789baaab6fff5"} Dec 05 17:35:16 crc kubenswrapper[4753]: I1205 17:35:16.978410 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1917fbcd-3d32-4ceb-aeab-1119aa3d4771","Type":"ContainerStarted","Data":"d42eae4094e277fe91ce9a0dad645ef23a73b16449388add87d04a9de1f36fff"} Dec 05 17:35:16 crc kubenswrapper[4753]: I1205 17:35:16.994688 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g27lw" podStartSLOduration=2.114916624 podStartE2EDuration="4.994657048s" podCreationTimestamp="2025-12-05 17:35:12 +0000 UTC" firstStartedPulling="2025-12-05 17:35:13.688236061 +0000 UTC m=+1852.191343067" lastFinishedPulling="2025-12-05 17:35:16.567976495 +0000 UTC m=+1855.071083491" observedRunningTime="2025-12-05 17:35:16.990814669 +0000 UTC m=+1855.493921695" watchObservedRunningTime="2025-12-05 17:35:16.994657048 +0000 UTC m=+1855.497764094" Dec 05 17:35:17 crc kubenswrapper[4753]: I1205 17:35:17.373706 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 17:35:17 crc kubenswrapper[4753]: I1205 17:35:17.722590 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:35:17 crc kubenswrapper[4753]: E1205 17:35:17.725504 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
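The two pod_startup_latency_tracker.go:104 entries above expose the SLO arithmetic: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration additionally subtracts the image-pull window (lastFinishedPulling minus firstStartedPulling). For cinder-scheduler-0 the pull timestamps are the zero time because no pull was needed, so the two durations coincide; for redhat-marketplace-g27lw the pull window is ~2.88s, leaving ~2.11s. The wall-clock arithmetic lands about 10ns away from the logged 2.114916624 because the tracker evidently works from the monotonic (m=+…) readings, which time.Parse discards. A worked check in Go, parsing the values exactly as they appear above (the layout string is Go's default time.Time format, which these log fields use):

    package main

    import (
    	"fmt"
    	"time"
    )

    const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

    func mustParse(s string) time.Time {
    	t, err := time.Parse(layout, s)
    	if err != nil {
    		panic(err)
    	}
    	return t
    }

    func main() {
    	// Values copied from the redhat-marketplace-g27lw entry above.
    	created := mustParse("2025-12-05 17:35:12 +0000 UTC")
    	firstPull := mustParse("2025-12-05 17:35:13.688236061 +0000 UTC")
    	lastPull := mustParse("2025-12-05 17:35:16.567976495 +0000 UTC")
    	observed := mustParse("2025-12-05 17:35:16.994657048 +0000 UTC")

    	e2e := observed.Sub(created)       // podStartE2EDuration: 4.994657048s
    	pulling := lastPull.Sub(firstPull) // image-pull window:   2.879740434s
    	slo := e2e - pulling               // ~2.114916614s; logged 2.114916624
    	fmt.Println(e2e, pulling, slo)     // (10ns gap from monotonic rounding)
    }

Dec 05 17:35:17 crc kubenswrapper[4753]: E1205 17:35:17.725504 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon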
pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:35:18 crc kubenswrapper[4753]: I1205 17:35:18.074716 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="cffca222-0336-40c8-886f-247667322702" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 503"
Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.027795 4753 generic.go:334] "Generic (PLEG): container finished" podID="cffca222-0336-40c8-886f-247667322702" containerID="baf68e5c6a3f229094007acbd46cad9362cc7aeb4f9e7f97faacec3542aaf326" exitCode=137
Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.027996 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cffca222-0336-40c8-886f-247667322702","Type":"ContainerDied","Data":"baf68e5c6a3f229094007acbd46cad9362cc7aeb4f9e7f97faacec3542aaf326"}
Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.304098 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.356903 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-config-data\") pod \"cffca222-0336-40c8-886f-247667322702\" (UID: \"cffca222-0336-40c8-886f-247667322702\") "
Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.357170 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7klpl\" (UniqueName: \"kubernetes.io/projected/cffca222-0336-40c8-886f-247667322702-kube-api-access-7klpl\") pod \"cffca222-0336-40c8-886f-247667322702\" (UID: \"cffca222-0336-40c8-886f-247667322702\") "
Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.357228 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-combined-ca-bundle\") pod \"cffca222-0336-40c8-886f-247667322702\" (UID: \"cffca222-0336-40c8-886f-247667322702\") "
Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.357253 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-run-httpd\") pod \"cffca222-0336-40c8-886f-247667322702\" (UID: \"cffca222-0336-40c8-886f-247667322702\") "
Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.357284 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-scripts\") pod \"cffca222-0336-40c8-886f-247667322702\" (UID: \"cffca222-0336-40c8-886f-247667322702\") "
Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.357359 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-sg-core-conf-yaml\") pod \"cffca222-0336-40c8-886f-247667322702\" (UID: \"cffca222-0336-40c8-886f-247667322702\") "
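The "back-off 5m0s restarting failed container" error above is CrashLoopBackOff at its ceiling: the kubelet delays each restart of a crashing container with an exponential back-off, and 5m is the cap the message itself reports. Nearby, the proxy-httpd readiness probe returns HTTP 503 (which only flips readiness) and a ceilometer-0 container exits with code 137, i.e. 128+9, a SIGKILL. A short sketch of the back-off schedule; the 10s base and per-restart doubling are the kubelet's documented defaults rather than something this log states, so treat them as assumptions:

    package main

    import (
    	"fmt"
    	"time"
    )

    // crashLoopDelay doubles the restart delay per failed attempt, up to a
    // ceiling. An illustrative sketch, not the kubelet's backoff code.
    func crashLoopDelay(restarts int, base, ceiling time.Duration) time.Duration {
    	d := base
    	for i := 0; i < restarts && d < ceiling; i++ {
    		d *= 2
    	}
    	if d > ceiling {
    		d = ceiling
    	}
    	return d
    }

    func main() {
    	for r := 0; r <= 6; r++ {
    		fmt.Printf("restart %d -> wait %v\n", r, crashLoopDelay(r, 10*time.Second, 5*time.Minute))
    	}
    	// restart 0 -> 10s, 1 -> 20s, 2 -> 40s, 3 -> 1m20s, 4 -> 2m40s,
    	// 5 -> 5m0s (capped), 6 -> 5m0s
    }

Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.357387 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: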
\"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-log-httpd\") pod \"cffca222-0336-40c8-886f-247667322702\" (UID: \"cffca222-0336-40c8-886f-247667322702\") " Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.358084 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cffca222-0336-40c8-886f-247667322702" (UID: "cffca222-0336-40c8-886f-247667322702"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.358499 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cffca222-0336-40c8-886f-247667322702" (UID: "cffca222-0336-40c8-886f-247667322702"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.370354 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-scripts" (OuterVolumeSpecName: "scripts") pod "cffca222-0336-40c8-886f-247667322702" (UID: "cffca222-0336-40c8-886f-247667322702"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.371608 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cffca222-0336-40c8-886f-247667322702-kube-api-access-7klpl" (OuterVolumeSpecName: "kube-api-access-7klpl") pod "cffca222-0336-40c8-886f-247667322702" (UID: "cffca222-0336-40c8-886f-247667322702"). InnerVolumeSpecName "kube-api-access-7klpl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.401895 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cffca222-0336-40c8-886f-247667322702" (UID: "cffca222-0336-40c8-886f-247667322702"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.417285 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cffca222-0336-40c8-886f-247667322702" (UID: "cffca222-0336-40c8-886f-247667322702"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.460440 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7klpl\" (UniqueName: \"kubernetes.io/projected/cffca222-0336-40c8-886f-247667322702-kube-api-access-7klpl\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.460476 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.460487 4753 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.460495 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.460504 4753 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.460512 4753 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cffca222-0336-40c8-886f-247667322702-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.464064 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-config-data" (OuterVolumeSpecName: "config-data") pod "cffca222-0336-40c8-886f-247667322702" (UID: "cffca222-0336-40c8-886f-247667322702"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:20 crc kubenswrapper[4753]: I1205 17:35:20.562686 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffca222-0336-40c8-886f-247667322702-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.039894 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cffca222-0336-40c8-886f-247667322702","Type":"ContainerDied","Data":"0478e3b1ec2a549cc7093ced1233130fda2058faddd46c0ee81a8755e7923cfb"} Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.040000 4753 scope.go:117] "RemoveContainer" containerID="baf68e5c6a3f229094007acbd46cad9362cc7aeb4f9e7f97faacec3542aaf326" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.039929 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.117219 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.128296 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.142792 4753 scope.go:117] "RemoveContainer" containerID="b0490a0fecfb0d034ea03e1168b2d4cfd167f436e290a0ebc77767227007f70f" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.158552 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:21 crc kubenswrapper[4753]: E1205 17:35:21.159042 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cffca222-0336-40c8-886f-247667322702" containerName="ceilometer-notification-agent" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.159060 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cffca222-0336-40c8-886f-247667322702" containerName="ceilometer-notification-agent" Dec 05 17:35:21 crc kubenswrapper[4753]: E1205 17:35:21.159091 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cffca222-0336-40c8-886f-247667322702" containerName="sg-core" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.159100 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cffca222-0336-40c8-886f-247667322702" containerName="sg-core" Dec 05 17:35:21 crc kubenswrapper[4753]: E1205 17:35:21.159115 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cffca222-0336-40c8-886f-247667322702" containerName="proxy-httpd" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.159122 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cffca222-0336-40c8-886f-247667322702" containerName="proxy-httpd" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.159337 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="cffca222-0336-40c8-886f-247667322702" containerName="proxy-httpd" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.159357 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="cffca222-0336-40c8-886f-247667322702" containerName="ceilometer-notification-agent" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.159385 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="cffca222-0336-40c8-886f-247667322702" containerName="sg-core" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.163241 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.165906 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.165906 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.180220 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.279704 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-scripts\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.280081 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-log-httpd\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.280132 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-config-data\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.280182 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.280225 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.280271 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpqdq\" (UniqueName: \"kubernetes.io/projected/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-kube-api-access-zpqdq\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.280331 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-run-httpd\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.388959 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpqdq\" (UniqueName: \"kubernetes.io/projected/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-kube-api-access-zpqdq\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: 
I1205 17:35:21.389076 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-run-httpd\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.389257 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-scripts\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.389311 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-log-httpd\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.389354 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-config-data\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.389382 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.389423 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.397433 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-run-httpd\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.397981 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-log-httpd\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.400785 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.421702 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.422490 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-scripts\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.425292 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpqdq\" (UniqueName: \"kubernetes.io/projected/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-kube-api-access-zpqdq\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.425921 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-config-data\") pod \"ceilometer-0\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.503702 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:21 crc kubenswrapper[4753]: I1205 17:35:21.743624 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cffca222-0336-40c8-886f-247667322702" path="/var/lib/kubelet/pods/cffca222-0336-40c8-886f-247667322702/volumes" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.044240 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-6d854f58c-mvlpx"] Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.052512 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.057085 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.057245 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.057304 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.060502 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6d854f58c-mvlpx"] Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.109817 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-combined-ca-bundle\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.109858 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc5495c5-a8e0-46c4-82c8-be930b187322-run-httpd\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.109900 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-public-tls-certs\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " 
pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.109918 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-internal-tls-certs\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.109957 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zw84l\" (UniqueName: \"kubernetes.io/projected/fc5495c5-a8e0-46c4-82c8-be930b187322-kube-api-access-zw84l\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.110009 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc5495c5-a8e0-46c4-82c8-be930b187322-etc-swift\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.110040 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-config-data\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.110083 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc5495c5-a8e0-46c4-82c8-be930b187322-log-httpd\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.214418 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc5495c5-a8e0-46c4-82c8-be930b187322-log-httpd\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.214558 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-combined-ca-bundle\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.214583 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc5495c5-a8e0-46c4-82c8-be930b187322-run-httpd\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.214612 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-public-tls-certs\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " 
pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.214630 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-internal-tls-certs\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.214660 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zw84l\" (UniqueName: \"kubernetes.io/projected/fc5495c5-a8e0-46c4-82c8-be930b187322-kube-api-access-zw84l\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.214704 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc5495c5-a8e0-46c4-82c8-be930b187322-etc-swift\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.214734 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-config-data\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.216562 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc5495c5-a8e0-46c4-82c8-be930b187322-log-httpd\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.218309 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/fc5495c5-a8e0-46c4-82c8-be930b187322-run-httpd\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.220271 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-internal-tls-certs\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.222340 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-public-tls-certs\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.236367 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/fc5495c5-a8e0-46c4-82c8-be930b187322-etc-swift\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.238503 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-combined-ca-bundle\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.240944 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zw84l\" (UniqueName: \"kubernetes.io/projected/fc5495c5-a8e0-46c4-82c8-be930b187322-kube-api-access-zw84l\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.241820 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc5495c5-a8e0-46c4-82c8-be930b187322-config-data\") pod \"swift-proxy-6d854f58c-mvlpx\" (UID: \"fc5495c5-a8e0-46c4-82c8-be930b187322\") " pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.437630 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.507316 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.507367 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.565423 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:22 crc kubenswrapper[4753]: I1205 17:35:22.695506 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 17:35:23 crc kubenswrapper[4753]: I1205 17:35:23.143601 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:23 crc kubenswrapper[4753]: I1205 17:35:23.202507 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g27lw"] Dec 05 17:35:23 crc kubenswrapper[4753]: I1205 17:35:23.449769 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:25 crc kubenswrapper[4753]: I1205 17:35:25.101112 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g27lw" podUID="541c84bb-d947-4c64-8930-787574d01f73" containerName="registry-server" containerID="cri-o://d3aab742c8b2701fffaa67498dac8f2bb70b4ede0e6a13fcdfd789baaab6fff5" gracePeriod=2 Dec 05 17:35:26 crc kubenswrapper[4753]: I1205 17:35:26.116846 4753 generic.go:334] "Generic (PLEG): container finished" podID="541c84bb-d947-4c64-8930-787574d01f73" containerID="d3aab742c8b2701fffaa67498dac8f2bb70b4ede0e6a13fcdfd789baaab6fff5" exitCode=0 Dec 05 17:35:26 crc kubenswrapper[4753]: I1205 17:35:26.116891 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g27lw" event={"ID":"541c84bb-d947-4c64-8930-787574d01f73","Type":"ContainerDied","Data":"d3aab742c8b2701fffaa67498dac8f2bb70b4ede0e6a13fcdfd789baaab6fff5"} Dec 05 17:35:27 crc kubenswrapper[4753]: I1205 17:35:27.453839 4753 scope.go:117] 
"RemoveContainer" containerID="c5c0a4fafd3b40829436466d86fa7132ec845878079668fc5f2fb5a78a8c4968" Dec 05 17:35:27 crc kubenswrapper[4753]: I1205 17:35:27.813171 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:27 crc kubenswrapper[4753]: I1205 17:35:27.968999 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dznp\" (UniqueName: \"kubernetes.io/projected/541c84bb-d947-4c64-8930-787574d01f73-kube-api-access-2dznp\") pod \"541c84bb-d947-4c64-8930-787574d01f73\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " Dec 05 17:35:27 crc kubenswrapper[4753]: I1205 17:35:27.971307 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-utilities\") pod \"541c84bb-d947-4c64-8930-787574d01f73\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " Dec 05 17:35:27 crc kubenswrapper[4753]: I1205 17:35:27.971500 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-catalog-content\") pod \"541c84bb-d947-4c64-8930-787574d01f73\" (UID: \"541c84bb-d947-4c64-8930-787574d01f73\") " Dec 05 17:35:27 crc kubenswrapper[4753]: I1205 17:35:27.972727 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-utilities" (OuterVolumeSpecName: "utilities") pod "541c84bb-d947-4c64-8930-787574d01f73" (UID: "541c84bb-d947-4c64-8930-787574d01f73"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:27 crc kubenswrapper[4753]: I1205 17:35:27.980006 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/541c84bb-d947-4c64-8930-787574d01f73-kube-api-access-2dznp" (OuterVolumeSpecName: "kube-api-access-2dznp") pod "541c84bb-d947-4c64-8930-787574d01f73" (UID: "541c84bb-d947-4c64-8930-787574d01f73"). InnerVolumeSpecName "kube-api-access-2dznp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.030041 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "541c84bb-d947-4c64-8930-787574d01f73" (UID: "541c84bb-d947-4c64-8930-787574d01f73"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.074884 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.075219 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dznp\" (UniqueName: \"kubernetes.io/projected/541c84bb-d947-4c64-8930-787574d01f73-kube-api-access-2dznp\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.075231 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/541c84bb-d947-4c64-8930-787574d01f73-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.135700 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.173443 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g27lw" event={"ID":"541c84bb-d947-4c64-8930-787574d01f73","Type":"ContainerDied","Data":"6a76e139669850f8299a293459d9c3365a672c57a6f695c8d883f8d20d112a5e"} Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.173530 4753 scope.go:117] "RemoveContainer" containerID="d3aab742c8b2701fffaa67498dac8f2bb70b4ede0e6a13fcdfd789baaab6fff5" Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.173701 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g27lw" Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.189477 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerStarted","Data":"1e58d87e769304950025932f88429079091a04eb29ceb1a9f1520002b144c11d"} Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.197124 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"1917fbcd-3d32-4ceb-aeab-1119aa3d4771","Type":"ContainerStarted","Data":"c6db617aa36ca8556f3d1964c03e00d16fc555454de3cb8a165cb3bdccf4a6e0"} Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.214400 4753 scope.go:117] "RemoveContainer" containerID="e49eeb07318611378a55ed83e7c930531712a315e930c4516870863b2a6e67af" Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.231452 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g27lw"] Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.241490 4753 scope.go:117] "RemoveContainer" containerID="7b144a374b4303f41ee589e532e86329d13a300a4668e1b67fb2711e6cafa06a" Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.242403 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g27lw"] Dec 05 17:35:28 crc kubenswrapper[4753]: W1205 17:35:28.246282 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc5495c5_a8e0_46c4_82c8_be930b187322.slice/crio-06257ae57853ab92cc675da0c6af5d81feff9439979d055299d42e4638de4002 WatchSource:0}: Error finding container 06257ae57853ab92cc675da0c6af5d81feff9439979d055299d42e4638de4002: Status 404 returned error can't find the container with id 06257ae57853ab92cc675da0c6af5d81feff9439979d055299d42e4638de4002 
Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.249187 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.061974649 podStartE2EDuration="13.249169305s" podCreationTimestamp="2025-12-05 17:35:15 +0000 UTC" firstStartedPulling="2025-12-05 17:35:16.369262343 +0000 UTC m=+1854.872369349" lastFinishedPulling="2025-12-05 17:35:27.556456989 +0000 UTC m=+1866.059564005" observedRunningTime="2025-12-05 17:35:28.224202706 +0000 UTC m=+1866.727309722" watchObservedRunningTime="2025-12-05 17:35:28.249169305 +0000 UTC m=+1866.752276321"
Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.265900 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-6d854f58c-mvlpx"]
Dec 05 17:35:28 crc kubenswrapper[4753]: I1205 17:35:28.720855 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd"
Dec 05 17:35:28 crc kubenswrapper[4753]: E1205 17:35:28.721470 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.212915 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerStarted","Data":"67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303"}
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.215996 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d854f58c-mvlpx" event={"ID":"fc5495c5-a8e0-46c4-82c8-be930b187322","Type":"ContainerStarted","Data":"0edbbfeadf495ab35865175ade658a39f92d27c810a7cbd52b2c97b0c63dee0e"}
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.216019 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d854f58c-mvlpx" event={"ID":"fc5495c5-a8e0-46c4-82c8-be930b187322","Type":"ContainerStarted","Data":"59cc3227acbc839633e4b79a0dc064b66e67f193529750edc48a1d6d60399fe8"}
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.216030 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-6d854f58c-mvlpx" event={"ID":"fc5495c5-a8e0-46c4-82c8-be930b187322","Type":"ContainerStarted","Data":"06257ae57853ab92cc675da0c6af5d81feff9439979d055299d42e4638de4002"}
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.216227 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6d854f58c-mvlpx"
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.243733 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-6d854f58c-mvlpx" podStartSLOduration=7.24370761 podStartE2EDuration="7.24370761s" podCreationTimestamp="2025-12-05 17:35:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:29.233054207 +0000 UTC m=+1867.736161253" watchObservedRunningTime="2025-12-05 17:35:29.24370761 +0000 UTC m=+1867.746814616"
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.745481 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="541c84bb-d947-4c64-8930-787574d01f73" path="/var/lib/kubelet/pods/541c84bb-d947-4c64-8930-787574d01f73/volumes"
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.832013 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.833088 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-log" containerID="cri-o://415e3d210ea73c9242e858919e7b9133f06ea6f24a8e1aa7e1aaa51f035458fd" gracePeriod=30
Dec 05 17:35:29 crc kubenswrapper[4753]: I1205 17:35:29.833286 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-httpd" containerID="cri-o://31ff5943ec907ee59d90022c023858050de26c11bb52b8a711ff3910dd69ad61" gracePeriod=30
Dec 05 17:35:30 crc kubenswrapper[4753]: I1205 17:35:30.234996 4753 generic.go:334] "Generic (PLEG): container finished" podID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerID="415e3d210ea73c9242e858919e7b9133f06ea6f24a8e1aa7e1aaa51f035458fd" exitCode=143
Dec 05 17:35:30 crc kubenswrapper[4753]: I1205 17:35:30.235081 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e5b52fd-87d5-4912-a8b2-305d4271a5ba","Type":"ContainerDied","Data":"415e3d210ea73c9242e858919e7b9133f06ea6f24a8e1aa7e1aaa51f035458fd"}
Dec 05 17:35:30 crc kubenswrapper[4753]: I1205 17:35:30.244255 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerStarted","Data":"9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00"}
Dec 05 17:35:30 crc kubenswrapper[4753]: I1205 17:35:30.244444 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-6d854f58c-mvlpx"
Dec 05 17:35:31 crc kubenswrapper[4753]: I1205 17:35:31.068523 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 17:35:31 crc kubenswrapper[4753]: I1205 17:35:31.069105 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerName="glance-log" containerID="cri-o://1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547" gracePeriod=30
Dec 05 17:35:31 crc kubenswrapper[4753]: I1205 17:35:31.069248 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerName="glance-httpd" containerID="cri-o://bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644" gracePeriod=30
Dec 05 17:35:31 crc kubenswrapper[4753]: I1205 17:35:31.255714 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerStarted","Data":"58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609"}
Dec 05 17:35:31 crc kubenswrapper[4753]: I1205 17:35:31.259672 4753 generic.go:334] "Generic (PLEG): container finished" podID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerID="1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547" exitCode=143
Dec 05 17:35:31 crc kubenswrapper[4753]: I1205 17:35:31.259940 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"870a2370-eaf3-4682-9bf9-712d62c24e28","Type":"ContainerDied","Data":"1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547"}
Dec 05 17:35:32 crc kubenswrapper[4753]: I1205 17:35:32.271850 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerStarted","Data":"efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e"}
Dec 05 17:35:32 crc kubenswrapper[4753]: I1205 17:35:32.272111 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 17:35:32 crc kubenswrapper[4753]: I1205 17:35:32.272098 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="proxy-httpd" containerID="cri-o://efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e" gracePeriod=30
Dec 05 17:35:32 crc kubenswrapper[4753]: I1205 17:35:32.272056 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="sg-core" containerID="cri-o://58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609" gracePeriod=30
Dec 05 17:35:32 crc kubenswrapper[4753]: I1205 17:35:32.272028 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="ceilometer-central-agent" containerID="cri-o://67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303" gracePeriod=30
Dec 05 17:35:32 crc kubenswrapper[4753]: I1205 17:35:32.272331 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="ceilometer-notification-agent" containerID="cri-o://9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00" gracePeriod=30
Dec 05 17:35:32 crc kubenswrapper[4753]: I1205 17:35:32.300412 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=7.719272431 podStartE2EDuration="11.300396716s" podCreationTimestamp="2025-12-05 17:35:21 +0000 UTC" firstStartedPulling="2025-12-05 17:35:28.13449855 +0000 UTC m=+1866.637605556" lastFinishedPulling="2025-12-05 17:35:31.715622835 +0000 UTC m=+1870.218729841" observedRunningTime="2025-12-05 17:35:32.29981153 +0000 UTC m=+1870.802918556" watchObservedRunningTime="2025-12-05 17:35:32.300396716 +0000 UTC m=+1870.803503722"
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.024845 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.173:9292/healthcheck\": read tcp 10.217.0.2:56798->10.217.0.173:9292: read: connection reset by peer"
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.024908 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-external-api-0" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.173:9292/healthcheck\": read tcp 10.217.0.2:56782->10.217.0.173:9292: read: connection reset by peer"
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.296314 4753 generic.go:334] "Generic (PLEG): container finished" podID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerID="efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e" exitCode=0
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.296652 4753 generic.go:334] "Generic (PLEG): container finished" podID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerID="58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609" exitCode=2
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.296662 4753 generic.go:334] "Generic (PLEG): container finished" podID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerID="9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00" exitCode=0
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.296707 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerDied","Data":"efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e"}
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.296735 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerDied","Data":"58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609"}
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.296745 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerDied","Data":"9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00"}
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.300847 4753 generic.go:334] "Generic (PLEG): container finished" podID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerID="31ff5943ec907ee59d90022c023858050de26c11bb52b8a711ff3910dd69ad61" exitCode=0
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.300900 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e5b52fd-87d5-4912-a8b2-305d4271a5ba","Type":"ContainerDied","Data":"31ff5943ec907ee59d90022c023858050de26c11bb52b8a711ff3910dd69ad61"}
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.650078 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.742967 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-scripts\") pod \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") "
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.743036 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-public-tls-certs\") pod \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") "
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.743112 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqpfr\" (UniqueName: \"kubernetes.io/projected/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-kube-api-access-cqpfr\") pod \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") "
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.743368 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") "
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.743404 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-combined-ca-bundle\") pod \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") "
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.743545 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-config-data\") pod \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") "
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.743584 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-httpd-run\") pod \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") "
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.743610 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-logs\") pod \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\" (UID: \"8e5b52fd-87d5-4912-a8b2-305d4271a5ba\") "
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.745600 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-logs" (OuterVolumeSpecName: "logs") pod "8e5b52fd-87d5-4912-a8b2-305d4271a5ba" (UID: "8e5b52fd-87d5-4912-a8b2-305d4271a5ba"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.745756 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8e5b52fd-87d5-4912-a8b2-305d4271a5ba" (UID: "8e5b52fd-87d5-4912-a8b2-305d4271a5ba"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.752484 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-scripts" (OuterVolumeSpecName: "scripts") pod "8e5b52fd-87d5-4912-a8b2-305d4271a5ba" (UID: "8e5b52fd-87d5-4912-a8b2-305d4271a5ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.763855 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312" (OuterVolumeSpecName: "glance") pod "8e5b52fd-87d5-4912-a8b2-305d4271a5ba" (UID: "8e5b52fd-87d5-4912-a8b2-305d4271a5ba"). InnerVolumeSpecName "pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.783454 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8e5b52fd-87d5-4912-a8b2-305d4271a5ba" (UID: "8e5b52fd-87d5-4912-a8b2-305d4271a5ba"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.792663 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-kube-api-access-cqpfr" (OuterVolumeSpecName: "kube-api-access-cqpfr") pod "8e5b52fd-87d5-4912-a8b2-305d4271a5ba" (UID: "8e5b52fd-87d5-4912-a8b2-305d4271a5ba"). InnerVolumeSpecName "kube-api-access-cqpfr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.800337 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8e5b52fd-87d5-4912-a8b2-305d4271a5ba" (UID: "8e5b52fd-87d5-4912-a8b2-305d4271a5ba"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.827686 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-config-data" (OuterVolumeSpecName: "config-data") pod "8e5b52fd-87d5-4912-a8b2-305d4271a5ba" (UID: "8e5b52fd-87d5-4912-a8b2-305d4271a5ba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.846233 4753 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") on node \"crc\" "
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.846272 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.846288 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.846298 4753 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.846308 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-logs\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.846316 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.846324 4753 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.846334 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqpfr\" (UniqueName: \"kubernetes.io/projected/8e5b52fd-87d5-4912-a8b2-305d4271a5ba-kube-api-access-cqpfr\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.885523 4753 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.885790 4753 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312") on node "crc"
Dec 05 17:35:33 crc kubenswrapper[4753]: I1205 17:35:33.949195 4753 reconciler_common.go:293] "Volume detached for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.038065 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-7rkhf"]
Dec 05 17:35:34 crc kubenswrapper[4753]: E1205 17:35:34.038501 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="541c84bb-d947-4c64-8930-787574d01f73" containerName="extract-utilities"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.038518 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="541c84bb-d947-4c64-8930-787574d01f73" containerName="extract-utilities"
Dec 05 17:35:34 crc kubenswrapper[4753]: E1205 17:35:34.038531 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-log"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.038537 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-log"
Dec 05 17:35:34 crc kubenswrapper[4753]: E1205 17:35:34.038551 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-httpd"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.038557 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-httpd"
Dec 05 17:35:34 crc kubenswrapper[4753]: E1205 17:35:34.038570 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="541c84bb-d947-4c64-8930-787574d01f73" containerName="registry-server"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.038576 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="541c84bb-d947-4c64-8930-787574d01f73" containerName="registry-server"
Dec 05 17:35:34 crc kubenswrapper[4753]: E1205 17:35:34.038583 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="541c84bb-d947-4c64-8930-787574d01f73" containerName="extract-content"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.038589 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="541c84bb-d947-4c64-8930-787574d01f73" containerName="extract-content"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.038761 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-log"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.038786 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="541c84bb-d947-4c64-8930-787574d01f73" containerName="registry-server"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.038798 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" containerName="glance-httpd"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.039611 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-7rkhf"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.057343 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-7rkhf"]
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.147815 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-ncw7r"]
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.149347 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ncw7r"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.154217 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b33874b7-5966-44b0-9a9c-a555e52127b9-operator-scripts\") pod \"nova-api-db-create-7rkhf\" (UID: \"b33874b7-5966-44b0-9a9c-a555e52127b9\") " pod="openstack/nova-api-db-create-7rkhf"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.154288 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xxtx\" (UniqueName: \"kubernetes.io/projected/b33874b7-5966-44b0-9a9c-a555e52127b9-kube-api-access-4xxtx\") pod \"nova-api-db-create-7rkhf\" (UID: \"b33874b7-5966-44b0-9a9c-a555e52127b9\") " pod="openstack/nova-api-db-create-7rkhf"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.156459 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-ncw7r"]
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.254758 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-qvb5q"]
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.255931 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b33874b7-5966-44b0-9a9c-a555e52127b9-operator-scripts\") pod \"nova-api-db-create-7rkhf\" (UID: \"b33874b7-5966-44b0-9a9c-a555e52127b9\") " pod="openstack/nova-api-db-create-7rkhf"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.256004 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtvzp\" (UniqueName: \"kubernetes.io/projected/8b231cbb-8d95-4948-aba8-825809d77fa7-kube-api-access-dtvzp\") pod \"nova-cell0-db-create-ncw7r\" (UID: \"8b231cbb-8d95-4948-aba8-825809d77fa7\") " pod="openstack/nova-cell0-db-create-ncw7r"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.256033 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xxtx\" (UniqueName: \"kubernetes.io/projected/b33874b7-5966-44b0-9a9c-a555e52127b9-kube-api-access-4xxtx\") pod \"nova-api-db-create-7rkhf\" (UID: \"b33874b7-5966-44b0-9a9c-a555e52127b9\") " pod="openstack/nova-api-db-create-7rkhf"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.256075 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b231cbb-8d95-4948-aba8-825809d77fa7-operator-scripts\") pod \"nova-cell0-db-create-ncw7r\" (UID: \"8b231cbb-8d95-4948-aba8-825809d77fa7\") " pod="openstack/nova-cell0-db-create-ncw7r"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.256192 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-qvb5q"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.257293 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b33874b7-5966-44b0-9a9c-a555e52127b9-operator-scripts\") pod \"nova-api-db-create-7rkhf\" (UID: \"b33874b7-5966-44b0-9a9c-a555e52127b9\") " pod="openstack/nova-api-db-create-7rkhf"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.270870 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-ae67-account-create-update-lxk95"]
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.273103 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-ae67-account-create-update-lxk95"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.275754 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.287773 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xxtx\" (UniqueName: \"kubernetes.io/projected/b33874b7-5966-44b0-9a9c-a555e52127b9-kube-api-access-4xxtx\") pod \"nova-api-db-create-7rkhf\" (UID: \"b33874b7-5966-44b0-9a9c-a555e52127b9\") " pod="openstack/nova-api-db-create-7rkhf"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.305794 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-qvb5q"]
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.332351 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-ae67-account-create-update-lxk95"]
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.341370 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8e5b52fd-87d5-4912-a8b2-305d4271a5ba","Type":"ContainerDied","Data":"db354364c39b9761b24def3ce625210c8f748702954e88f27a292043415b9c92"}
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.341420 4753 scope.go:117] "RemoveContainer" containerID="31ff5943ec907ee59d90022c023858050de26c11bb52b8a711ff3910dd69ad61"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.341430 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.358212 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmzlk\" (UniqueName: \"kubernetes.io/projected/d9465bae-5a66-4ab6-956b-1258eb08db35-kube-api-access-qmzlk\") pod \"nova-api-ae67-account-create-update-lxk95\" (UID: \"d9465bae-5a66-4ab6-956b-1258eb08db35\") " pod="openstack/nova-api-ae67-account-create-update-lxk95"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.358301 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtvzp\" (UniqueName: \"kubernetes.io/projected/8b231cbb-8d95-4948-aba8-825809d77fa7-kube-api-access-dtvzp\") pod \"nova-cell0-db-create-ncw7r\" (UID: \"8b231cbb-8d95-4948-aba8-825809d77fa7\") " pod="openstack/nova-cell0-db-create-ncw7r"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.358327 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xrkj9\" (UniqueName: \"kubernetes.io/projected/762045ba-049a-471e-b6dc-ac74b0c28bfa-kube-api-access-xrkj9\") pod \"nova-cell1-db-create-qvb5q\" (UID: \"762045ba-049a-471e-b6dc-ac74b0c28bfa\") " pod="openstack/nova-cell1-db-create-qvb5q"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.358375 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b231cbb-8d95-4948-aba8-825809d77fa7-operator-scripts\") pod \"nova-cell0-db-create-ncw7r\" (UID: \"8b231cbb-8d95-4948-aba8-825809d77fa7\") " pod="openstack/nova-cell0-db-create-ncw7r"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.358423 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9465bae-5a66-4ab6-956b-1258eb08db35-operator-scripts\") pod \"nova-api-ae67-account-create-update-lxk95\" (UID: \"d9465bae-5a66-4ab6-956b-1258eb08db35\") " pod="openstack/nova-api-ae67-account-create-update-lxk95"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.358472 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/762045ba-049a-471e-b6dc-ac74b0c28bfa-operator-scripts\") pod \"nova-cell1-db-create-qvb5q\" (UID: \"762045ba-049a-471e-b6dc-ac74b0c28bfa\") " pod="openstack/nova-cell1-db-create-qvb5q"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.359330 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b231cbb-8d95-4948-aba8-825809d77fa7-operator-scripts\") pod \"nova-cell0-db-create-ncw7r\" (UID: \"8b231cbb-8d95-4948-aba8-825809d77fa7\") " pod="openstack/nova-cell0-db-create-ncw7r"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.361626 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-7rkhf"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.384934 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtvzp\" (UniqueName: \"kubernetes.io/projected/8b231cbb-8d95-4948-aba8-825809d77fa7-kube-api-access-dtvzp\") pod \"nova-cell0-db-create-ncw7r\" (UID: \"8b231cbb-8d95-4948-aba8-825809d77fa7\") " pod="openstack/nova-cell0-db-create-ncw7r"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.463131 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/762045ba-049a-471e-b6dc-ac74b0c28bfa-operator-scripts\") pod \"nova-cell1-db-create-qvb5q\" (UID: \"762045ba-049a-471e-b6dc-ac74b0c28bfa\") " pod="openstack/nova-cell1-db-create-qvb5q"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.463284 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmzlk\" (UniqueName: \"kubernetes.io/projected/d9465bae-5a66-4ab6-956b-1258eb08db35-kube-api-access-qmzlk\") pod \"nova-api-ae67-account-create-update-lxk95\" (UID: \"d9465bae-5a66-4ab6-956b-1258eb08db35\") " pod="openstack/nova-api-ae67-account-create-update-lxk95"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.463324 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xrkj9\" (UniqueName: \"kubernetes.io/projected/762045ba-049a-471e-b6dc-ac74b0c28bfa-kube-api-access-xrkj9\") pod \"nova-cell1-db-create-qvb5q\" (UID: \"762045ba-049a-471e-b6dc-ac74b0c28bfa\") " pod="openstack/nova-cell1-db-create-qvb5q"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.463372 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9465bae-5a66-4ab6-956b-1258eb08db35-operator-scripts\") pod \"nova-api-ae67-account-create-update-lxk95\" (UID: \"d9465bae-5a66-4ab6-956b-1258eb08db35\") " pod="openstack/nova-api-ae67-account-create-update-lxk95"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.464073 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9465bae-5a66-4ab6-956b-1258eb08db35-operator-scripts\") pod \"nova-api-ae67-account-create-update-lxk95\" (UID: \"d9465bae-5a66-4ab6-956b-1258eb08db35\") " pod="openstack/nova-api-ae67-account-create-update-lxk95"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.464611 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/762045ba-049a-471e-b6dc-ac74b0c28bfa-operator-scripts\") pod \"nova-cell1-db-create-qvb5q\" (UID: \"762045ba-049a-471e-b6dc-ac74b0c28bfa\") " pod="openstack/nova-cell1-db-create-qvb5q"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.475111 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-080f-account-create-update-hsplf"]
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.477747 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-080f-account-create-update-hsplf" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.479599 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.482079 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-080f-account-create-update-hsplf"] Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.483445 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xrkj9\" (UniqueName: \"kubernetes.io/projected/762045ba-049a-471e-b6dc-ac74b0c28bfa-kube-api-access-xrkj9\") pod \"nova-cell1-db-create-qvb5q\" (UID: \"762045ba-049a-471e-b6dc-ac74b0c28bfa\") " pod="openstack/nova-cell1-db-create-qvb5q" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.487007 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmzlk\" (UniqueName: \"kubernetes.io/projected/d9465bae-5a66-4ab6-956b-1258eb08db35-kube-api-access-qmzlk\") pod \"nova-api-ae67-account-create-update-lxk95\" (UID: \"d9465bae-5a66-4ab6-956b-1258eb08db35\") " pod="openstack/nova-api-ae67-account-create-update-lxk95" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.491594 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ncw7r" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.516264 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-ae67-account-create-update-lxk95" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.543695 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.559077 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.570094 4753 scope.go:117] "RemoveContainer" containerID="415e3d210ea73c9242e858919e7b9133f06ea6f24a8e1aa7e1aaa51f035458fd" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.582809 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.583785 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-qvb5q" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.585097 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.591985 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.593634 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.616192 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.646304 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-134b-account-create-update-qf8qh"] Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.648769 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-134b-account-create-update-qf8qh" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.666853 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.669833 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlqt8\" (UniqueName: \"kubernetes.io/projected/2f79146b-e55c-4ccf-93b3-91829167768b-kube-api-access-qlqt8\") pod \"nova-cell0-080f-account-create-update-hsplf\" (UID: \"2f79146b-e55c-4ccf-93b3-91829167768b\") " pod="openstack/nova-cell0-080f-account-create-update-hsplf" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.669991 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f79146b-e55c-4ccf-93b3-91829167768b-operator-scripts\") pod \"nova-cell0-080f-account-create-update-hsplf\" (UID: \"2f79146b-e55c-4ccf-93b3-91829167768b\") " pod="openstack/nova-cell0-080f-account-create-update-hsplf" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.698940 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-134b-account-create-update-qf8qh"] Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772476 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj8ff\" (UniqueName: \"kubernetes.io/projected/187c344a-5fdf-47db-b103-de9458e6a58a-kube-api-access-vj8ff\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772535 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-scripts\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772578 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/187c344a-5fdf-47db-b103-de9458e6a58a-logs\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772603 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772668 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f79146b-e55c-4ccf-93b3-91829167768b-operator-scripts\") pod \"nova-cell0-080f-account-create-update-hsplf\" (UID: \"2f79146b-e55c-4ccf-93b3-91829167768b\") " pod="openstack/nova-cell0-080f-account-create-update-hsplf" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772688 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/187c344a-5fdf-47db-b103-de9458e6a58a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772759 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772785 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de05c77d-b326-4c0c-996f-a78f35ae6694-operator-scripts\") pod \"nova-cell1-134b-account-create-update-qf8qh\" (UID: \"de05c77d-b326-4c0c-996f-a78f35ae6694\") " pod="openstack/nova-cell1-134b-account-create-update-qf8qh" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772805 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzq7b\" (UniqueName: \"kubernetes.io/projected/de05c77d-b326-4c0c-996f-a78f35ae6694-kube-api-access-gzq7b\") pod \"nova-cell1-134b-account-create-update-qf8qh\" (UID: \"de05c77d-b326-4c0c-996f-a78f35ae6694\") " pod="openstack/nova-cell1-134b-account-create-update-qf8qh" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772826 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlqt8\" (UniqueName: \"kubernetes.io/projected/2f79146b-e55c-4ccf-93b3-91829167768b-kube-api-access-qlqt8\") pod \"nova-cell0-080f-account-create-update-hsplf\" (UID: \"2f79146b-e55c-4ccf-93b3-91829167768b\") " pod="openstack/nova-cell0-080f-account-create-update-hsplf" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772844 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-config-data\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.772864 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.773609 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f79146b-e55c-4ccf-93b3-91829167768b-operator-scripts\") pod \"nova-cell0-080f-account-create-update-hsplf\" (UID: \"2f79146b-e55c-4ccf-93b3-91829167768b\") " pod="openstack/nova-cell0-080f-account-create-update-hsplf" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.875906 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod 
\"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.876531 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de05c77d-b326-4c0c-996f-a78f35ae6694-operator-scripts\") pod \"nova-cell1-134b-account-create-update-qf8qh\" (UID: \"de05c77d-b326-4c0c-996f-a78f35ae6694\") " pod="openstack/nova-cell1-134b-account-create-update-qf8qh" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.876640 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzq7b\" (UniqueName: \"kubernetes.io/projected/de05c77d-b326-4c0c-996f-a78f35ae6694-kube-api-access-gzq7b\") pod \"nova-cell1-134b-account-create-update-qf8qh\" (UID: \"de05c77d-b326-4c0c-996f-a78f35ae6694\") " pod="openstack/nova-cell1-134b-account-create-update-qf8qh" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.876742 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-config-data\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.880582 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.880825 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj8ff\" (UniqueName: \"kubernetes.io/projected/187c344a-5fdf-47db-b103-de9458e6a58a-kube-api-access-vj8ff\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.880963 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-scripts\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.881080 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/187c344a-5fdf-47db-b103-de9458e6a58a-logs\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.881195 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.881415 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/187c344a-5fdf-47db-b103-de9458e6a58a-httpd-run\") 
pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.882067 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/187c344a-5fdf-47db-b103-de9458e6a58a-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.879297 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlqt8\" (UniqueName: \"kubernetes.io/projected/2f79146b-e55c-4ccf-93b3-91829167768b-kube-api-access-qlqt8\") pod \"nova-cell0-080f-account-create-update-hsplf\" (UID: \"2f79146b-e55c-4ccf-93b3-91829167768b\") " pod="openstack/nova-cell0-080f-account-create-update-hsplf" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.879816 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de05c77d-b326-4c0c-996f-a78f35ae6694-operator-scripts\") pod \"nova-cell1-134b-account-create-update-qf8qh\" (UID: \"de05c77d-b326-4c0c-996f-a78f35ae6694\") " pod="openstack/nova-cell1-134b-account-create-update-qf8qh" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.883207 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/187c344a-5fdf-47db-b103-de9458e6a58a-logs\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0" Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.891620 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.891667 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0a0f782b6c9e85c3ca02611e1c19370b05b7adf40a84b1c1fba977879932e0cc/globalmount\"" pod="openstack/glance-default-external-api-0"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.899064 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-scripts\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.906087 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.906889 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj8ff\" (UniqueName: \"kubernetes.io/projected/187c344a-5fdf-47db-b103-de9458e6a58a-kube-api-access-vj8ff\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.921327 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzq7b\" (UniqueName: \"kubernetes.io/projected/de05c77d-b326-4c0c-996f-a78f35ae6694-kube-api-access-gzq7b\") pod \"nova-cell1-134b-account-create-update-qf8qh\" (UID: \"de05c77d-b326-4c0c-996f-a78f35ae6694\") " pod="openstack/nova-cell1-134b-account-create-update-qf8qh"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.932348 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-config-data\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0"
Dec 05 17:35:34 crc kubenswrapper[4753]: I1205 17:35:34.935186 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/187c344a-5fdf-47db-b103-de9458e6a58a-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0"
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.009643 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-134b-account-create-update-qf8qh"
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.038642 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-8aedb3c5-dd54-4f7e-b1a5-0e458df9d312\") pod \"glance-default-external-api-0\" (UID: \"187c344a-5fdf-47db-b103-de9458e6a58a\") " pod="openstack/glance-default-external-api-0"
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.147908 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-080f-account-create-update-hsplf"
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.205625 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.324882 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.441077 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxh7m\" (UniqueName: \"kubernetes.io/projected/870a2370-eaf3-4682-9bf9-712d62c24e28-kube-api-access-nxh7m\") pod \"870a2370-eaf3-4682-9bf9-712d62c24e28\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") "
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.441728 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-logs\") pod \"870a2370-eaf3-4682-9bf9-712d62c24e28\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") "
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.441770 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-scripts\") pod \"870a2370-eaf3-4682-9bf9-712d62c24e28\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") "
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.442034 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"870a2370-eaf3-4682-9bf9-712d62c24e28\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") "
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.442073 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-config-data\") pod \"870a2370-eaf3-4682-9bf9-712d62c24e28\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") "
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.442220 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-internal-tls-certs\") pod \"870a2370-eaf3-4682-9bf9-712d62c24e28\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") "
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.442248 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-httpd-run\") pod \"870a2370-eaf3-4682-9bf9-712d62c24e28\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") "
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.442303 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-combined-ca-bundle\") pod \"870a2370-eaf3-4682-9bf9-712d62c24e28\" (UID: \"870a2370-eaf3-4682-9bf9-712d62c24e28\") "
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.445035 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-logs" (OuterVolumeSpecName: "logs") pod "870a2370-eaf3-4682-9bf9-712d62c24e28" (UID: "870a2370-eaf3-4682-9bf9-712d62c24e28"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.446576 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "870a2370-eaf3-4682-9bf9-712d62c24e28" (UID: "870a2370-eaf3-4682-9bf9-712d62c24e28"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.470201 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b" (OuterVolumeSpecName: "glance") pod "870a2370-eaf3-4682-9bf9-712d62c24e28" (UID: "870a2370-eaf3-4682-9bf9-712d62c24e28"). InnerVolumeSpecName "pvc-6f7bb1ce-2b18-4809-8632-596a297f364b". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.476545 4753 generic.go:334] "Generic (PLEG): container finished" podID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerID="bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644" exitCode=0
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.476640 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"870a2370-eaf3-4682-9bf9-712d62c24e28","Type":"ContainerDied","Data":"bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644"}
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.476680 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"870a2370-eaf3-4682-9bf9-712d62c24e28","Type":"ContainerDied","Data":"cbe7223ae8193ebc0e0ad1431c54cb3a41573407b1e7f2b3bd8a761d91e72011"}
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.476700 4753 scope.go:117] "RemoveContainer" containerID="bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644"
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.476863 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0"
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.525383 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-scripts" (OuterVolumeSpecName: "scripts") pod "870a2370-eaf3-4682-9bf9-712d62c24e28" (UID: "870a2370-eaf3-4682-9bf9-712d62c24e28"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.525652 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/870a2370-eaf3-4682-9bf9-712d62c24e28-kube-api-access-nxh7m" (OuterVolumeSpecName: "kube-api-access-nxh7m") pod "870a2370-eaf3-4682-9bf9-712d62c24e28" (UID: "870a2370-eaf3-4682-9bf9-712d62c24e28"). InnerVolumeSpecName "kube-api-access-nxh7m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.545272 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxh7m\" (UniqueName: \"kubernetes.io/projected/870a2370-eaf3-4682-9bf9-712d62c24e28-kube-api-access-nxh7m\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.545321 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-logs\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.545352 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.545832 4753 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") on node \"crc\" "
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.545911 4753 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/870a2370-eaf3-4682-9bf9-712d62c24e28-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.557063 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "870a2370-eaf3-4682-9bf9-712d62c24e28" (UID: "870a2370-eaf3-4682-9bf9-712d62c24e28"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.586742 4753 scope.go:117] "RemoveContainer" containerID="1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547"
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.600845 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "870a2370-eaf3-4682-9bf9-712d62c24e28" (UID: "870a2370-eaf3-4682-9bf9-712d62c24e28"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.611386 4753 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.611583 4753 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-6f7bb1ce-2b18-4809-8632-596a297f364b" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b") on node "crc" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.647618 4753 reconciler_common.go:293] "Volume detached for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.647649 4753 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.647662 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.661245 4753 scope.go:117] "RemoveContainer" containerID="bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644" Dec 05 17:35:35 crc kubenswrapper[4753]: E1205 17:35:35.671545 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644\": container with ID starting with bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644 not found: ID does not exist" containerID="bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.671593 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644"} err="failed to get container status \"bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644\": rpc error: code = NotFound desc = could not find container \"bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644\": container with ID starting with bf901a23935a1fb981155a48242300c7930db807182884301ccbfb3a25e80644 not found: ID does not exist" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.671620 4753 scope.go:117] "RemoveContainer" containerID="1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547" Dec 05 17:35:35 crc kubenswrapper[4753]: E1205 17:35:35.672213 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547\": container with ID starting with 1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547 not found: ID does not exist" containerID="1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.672258 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547"} err="failed to get container status \"1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547\": rpc error: code = NotFound desc = could not find container \"1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547\": container with ID starting with 
1823306ef1c79c57969ca58985ac954e1e0ec367d52bab894774055565525547 not found: ID does not exist" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.675868 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-config-data" (OuterVolumeSpecName: "config-data") pod "870a2370-eaf3-4682-9bf9-712d62c24e28" (UID: "870a2370-eaf3-4682-9bf9-712d62c24e28"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.734984 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e5b52fd-87d5-4912-a8b2-305d4271a5ba" path="/var/lib/kubelet/pods/8e5b52fd-87d5-4912-a8b2-305d4271a5ba/volumes" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.749370 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/870a2370-eaf3-4682-9bf9-712d62c24e28-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.860353 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.887233 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.904080 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-ae67-account-create-update-lxk95"] Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.914285 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:35:35 crc kubenswrapper[4753]: E1205 17:35:35.914888 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerName="glance-httpd" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.914905 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerName="glance-httpd" Dec 05 17:35:35 crc kubenswrapper[4753]: E1205 17:35:35.914961 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerName="glance-log" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.914970 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerName="glance-log" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.915221 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerName="glance-httpd" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.915261 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="870a2370-eaf3-4682-9bf9-712d62c24e28" containerName="glance-log" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.916625 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.921610 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.921837 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.924211 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.960020 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-7rkhf"] Dec 05 17:35:35 crc kubenswrapper[4753]: I1205 17:35:35.973350 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-ncw7r"] Dec 05 17:35:35 crc kubenswrapper[4753]: W1205 17:35:35.982784 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb33874b7_5966_44b0_9a9c_a555e52127b9.slice/crio-64ae1a085a1278d3668e862e5f45daf58be6492d0a5512d9bc2806d3729a3d27 WatchSource:0}: Error finding container 64ae1a085a1278d3668e862e5f45daf58be6492d0a5512d9bc2806d3729a3d27: Status 404 returned error can't find the container with id 64ae1a085a1278d3668e862e5f45daf58be6492d0a5512d9bc2806d3729a3d27 Dec 05 17:35:35 crc kubenswrapper[4753]: W1205 17:35:35.984401 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b231cbb_8d95_4948_aba8_825809d77fa7.slice/crio-dcc835b972982ba9c07d11946943f849a262e7bb3dd532847b1c9149dcd8f1f3 WatchSource:0}: Error finding container dcc835b972982ba9c07d11946943f849a262e7bb3dd532847b1c9149dcd8f1f3: Status 404 returned error can't find the container with id dcc835b972982ba9c07d11946943f849a262e7bb3dd532847b1c9149dcd8f1f3 Dec 05 17:35:36 crc kubenswrapper[4753]: W1205 17:35:36.004282 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod762045ba_049a_471e_b6dc_ac74b0c28bfa.slice/crio-45a306a872fb9b6d223392f0bd6b05c479d7d90d9b427bd7e7f0b1bcea8fd1e4 WatchSource:0}: Error finding container 45a306a872fb9b6d223392f0bd6b05c479d7d90d9b427bd7e7f0b1bcea8fd1e4: Status 404 returned error can't find the container with id 45a306a872fb9b6d223392f0bd6b05c479d7d90d9b427bd7e7f0b1bcea8fd1e4 Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.005619 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-qvb5q"] Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.059813 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.059875 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33797bcf-e2f2-4a28-8148-3e027fc342d8-logs\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 
17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.059942 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.059996 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.060042 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.060109 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.060185 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33797bcf-e2f2-4a28-8148-3e027fc342d8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.060247 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dx6hf\" (UniqueName: \"kubernetes.io/projected/33797bcf-e2f2-4a28-8148-3e027fc342d8-kube-api-access-dx6hf\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.161807 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dx6hf\" (UniqueName: \"kubernetes.io/projected/33797bcf-e2f2-4a28-8148-3e027fc342d8-kube-api-access-dx6hf\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.161902 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.161939 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33797bcf-e2f2-4a28-8148-3e027fc342d8-logs\") pod \"glance-default-internal-api-0\" 
(UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.162002 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.162051 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.162096 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.162202 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.162261 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33797bcf-e2f2-4a28-8148-3e027fc342d8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.162618 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/33797bcf-e2f2-4a28-8148-3e027fc342d8-logs\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.164395 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/33797bcf-e2f2-4a28-8148-3e027fc342d8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.172657 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.176961 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 
17:35:36.181948 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.184047 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/33797bcf-e2f2-4a28-8148-3e027fc342d8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.194801 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.194851 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/df7f219cf3246d8ce07ad766614ffac20d7c8f72baddde817ee73d8a655238aa/globalmount\"" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.210582 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dx6hf\" (UniqueName: \"kubernetes.io/projected/33797bcf-e2f2-4a28-8148-3e027fc342d8-kube-api-access-dx6hf\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.254383 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-080f-account-create-update-hsplf"] Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.274440 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-134b-account-create-update-qf8qh"] Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.294078 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-6f7bb1ce-2b18-4809-8632-596a297f364b\") pod \"glance-default-internal-api-0\" (UID: \"33797bcf-e2f2-4a28-8148-3e027fc342d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.313228 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:35:36 crc kubenswrapper[4753]: W1205 17:35:36.331388 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod187c344a_5fdf_47db_b103_de9458e6a58a.slice/crio-9901e6f1fbc9b3429a8598ab494ec75f9e7688373d1ded8cece66f6ddad0220d WatchSource:0}: Error finding container 9901e6f1fbc9b3429a8598ab494ec75f9e7688373d1ded8cece66f6ddad0220d: Status 404 returned error can't find the container with id 9901e6f1fbc9b3429a8598ab494ec75f9e7688373d1ded8cece66f6ddad0220d Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.403676 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.554710 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-ae67-account-create-update-lxk95" event={"ID":"d9465bae-5a66-4ab6-956b-1258eb08db35","Type":"ContainerStarted","Data":"c81edf8500118e047a560e89ae9db6aa858a333b949cf7faea5dca3fa5d4756f"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.554887 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-ae67-account-create-update-lxk95" event={"ID":"d9465bae-5a66-4ab6-956b-1258eb08db35","Type":"ContainerStarted","Data":"c053a2888000b4f0212cc48f8772f2bffde1d7e189c44d06ab17309d953eee68"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.574104 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-qvb5q" event={"ID":"762045ba-049a-471e-b6dc-ac74b0c28bfa","Type":"ContainerStarted","Data":"45a306a872fb9b6d223392f0bd6b05c479d7d90d9b427bd7e7f0b1bcea8fd1e4"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.592479 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-134b-account-create-update-qf8qh" event={"ID":"de05c77d-b326-4c0c-996f-a78f35ae6694","Type":"ContainerStarted","Data":"a71c865f17a2f1faf4cf2f18d727b1f459837025ce9e6f42fea141b14ccf54f5"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.599439 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-080f-account-create-update-hsplf" event={"ID":"2f79146b-e55c-4ccf-93b3-91829167768b","Type":"ContainerStarted","Data":"b4210f6662db0df12c722a895a87e8808b22ab34a1abb8faa0b89bff57f332a6"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.600831 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-ae67-account-create-update-lxk95" podStartSLOduration=2.600807972 podStartE2EDuration="2.600807972s" podCreationTimestamp="2025-12-05 17:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:36.580640749 +0000 UTC m=+1875.083747745" watchObservedRunningTime="2025-12-05 17:35:36.600807972 +0000 UTC m=+1875.103914978" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.606949 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-qvb5q" podStartSLOduration=2.606929046 podStartE2EDuration="2.606929046s" podCreationTimestamp="2025-12-05 17:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:36.602679305 +0000 UTC m=+1875.105786311" watchObservedRunningTime="2025-12-05 17:35:36.606929046 +0000 UTC m=+1875.110036052" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.611504 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ncw7r" event={"ID":"8b231cbb-8d95-4948-aba8-825809d77fa7","Type":"ContainerStarted","Data":"11ee963d9277da03301c99cb1ed7e9f5ddf7286cc262069817bd8138af0ee490"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.611584 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ncw7r" event={"ID":"8b231cbb-8d95-4948-aba8-825809d77fa7","Type":"ContainerStarted","Data":"dcc835b972982ba9c07d11946943f849a262e7bb3dd532847b1c9149dcd8f1f3"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.620095 4753 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"187c344a-5fdf-47db-b103-de9458e6a58a","Type":"ContainerStarted","Data":"9901e6f1fbc9b3429a8598ab494ec75f9e7688373d1ded8cece66f6ddad0220d"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.626423 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7rkhf" event={"ID":"b33874b7-5966-44b0-9a9c-a555e52127b9","Type":"ContainerStarted","Data":"83d17ce61da5fa359a7f1c3eb57d36f1a3f0fd913b9f999b59d4e1fc201fa205"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.626467 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7rkhf" event={"ID":"b33874b7-5966-44b0-9a9c-a555e52127b9","Type":"ContainerStarted","Data":"64ae1a085a1278d3668e862e5f45daf58be6492d0a5512d9bc2806d3729a3d27"} Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.626843 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-080f-account-create-update-hsplf" podStartSLOduration=2.62682448 podStartE2EDuration="2.62682448s" podCreationTimestamp="2025-12-05 17:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:36.619019319 +0000 UTC m=+1875.122126315" watchObservedRunningTime="2025-12-05 17:35:36.62682448 +0000 UTC m=+1875.129931486" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.662784 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-ncw7r" podStartSLOduration=2.662762821 podStartE2EDuration="2.662762821s" podCreationTimestamp="2025-12-05 17:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:36.642448204 +0000 UTC m=+1875.145555210" watchObservedRunningTime="2025-12-05 17:35:36.662762821 +0000 UTC m=+1875.165869827" Dec 05 17:35:36 crc kubenswrapper[4753]: I1205 17:35:36.700730 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-7rkhf" podStartSLOduration=2.700708028 podStartE2EDuration="2.700708028s" podCreationTimestamp="2025-12-05 17:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:36.66733442 +0000 UTC m=+1875.170441436" watchObservedRunningTime="2025-12-05 17:35:36.700708028 +0000 UTC m=+1875.203815034" Dec 05 17:35:37 crc kubenswrapper[4753]: W1205 17:35:37.158588 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod33797bcf_e2f2_4a28_8148_3e027fc342d8.slice/crio-c8a540c0254a4ed74c41c1d2cc0e436705c455680f7e08f1fbe323921d24f53c WatchSource:0}: Error finding container c8a540c0254a4ed74c41c1d2cc0e436705c455680f7e08f1fbe323921d24f53c: Status 404 returned error can't find the container with id c8a540c0254a4ed74c41c1d2cc0e436705c455680f7e08f1fbe323921d24f53c Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.159452 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.445735 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.456523 4753 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-6d854f58c-mvlpx" Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.690085 4753 generic.go:334] "Generic (PLEG): container finished" podID="b33874b7-5966-44b0-9a9c-a555e52127b9" containerID="83d17ce61da5fa359a7f1c3eb57d36f1a3f0fd913b9f999b59d4e1fc201fa205" exitCode=0 Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.690482 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7rkhf" event={"ID":"b33874b7-5966-44b0-9a9c-a555e52127b9","Type":"ContainerDied","Data":"83d17ce61da5fa359a7f1c3eb57d36f1a3f0fd913b9f999b59d4e1fc201fa205"} Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.694944 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"33797bcf-e2f2-4a28-8148-3e027fc342d8","Type":"ContainerStarted","Data":"c8a540c0254a4ed74c41c1d2cc0e436705c455680f7e08f1fbe323921d24f53c"} Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.715351 4753 generic.go:334] "Generic (PLEG): container finished" podID="d9465bae-5a66-4ab6-956b-1258eb08db35" containerID="c81edf8500118e047a560e89ae9db6aa858a333b949cf7faea5dca3fa5d4756f" exitCode=0 Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.715426 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-ae67-account-create-update-lxk95" event={"ID":"d9465bae-5a66-4ab6-956b-1258eb08db35","Type":"ContainerDied","Data":"c81edf8500118e047a560e89ae9db6aa858a333b949cf7faea5dca3fa5d4756f"} Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.722108 4753 generic.go:334] "Generic (PLEG): container finished" podID="762045ba-049a-471e-b6dc-ac74b0c28bfa" containerID="abf59d661542894af680c2f2c771713ff34a2f765563f33ac37718a60faa7e2c" exitCode=0 Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.725372 4753 generic.go:334] "Generic (PLEG): container finished" podID="de05c77d-b326-4c0c-996f-a78f35ae6694" containerID="852c80aed050ae6afcd79a565b028380534b80ba8c6b19da7ea2df168102a5d6" exitCode=0 Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.728614 4753 generic.go:334] "Generic (PLEG): container finished" podID="2f79146b-e55c-4ccf-93b3-91829167768b" containerID="1e432b8956b70c86da42c88ebc4f91ad24587d8748b9b21ab70af6fd9b21e073" exitCode=0 Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.735977 4753 generic.go:334] "Generic (PLEG): container finished" podID="8b231cbb-8d95-4948-aba8-825809d77fa7" containerID="11ee963d9277da03301c99cb1ed7e9f5ddf7286cc262069817bd8138af0ee490" exitCode=0 Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.738568 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="870a2370-eaf3-4682-9bf9-712d62c24e28" path="/var/lib/kubelet/pods/870a2370-eaf3-4682-9bf9-712d62c24e28/volumes" Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.739607 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-qvb5q" event={"ID":"762045ba-049a-471e-b6dc-ac74b0c28bfa","Type":"ContainerDied","Data":"abf59d661542894af680c2f2c771713ff34a2f765563f33ac37718a60faa7e2c"} Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.739647 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-134b-account-create-update-qf8qh" event={"ID":"de05c77d-b326-4c0c-996f-a78f35ae6694","Type":"ContainerDied","Data":"852c80aed050ae6afcd79a565b028380534b80ba8c6b19da7ea2df168102a5d6"} Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.739664 4753 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-080f-account-create-update-hsplf" event={"ID":"2f79146b-e55c-4ccf-93b3-91829167768b","Type":"ContainerDied","Data":"1e432b8956b70c86da42c88ebc4f91ad24587d8748b9b21ab70af6fd9b21e073"} Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.739679 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ncw7r" event={"ID":"8b231cbb-8d95-4948-aba8-825809d77fa7","Type":"ContainerDied","Data":"11ee963d9277da03301c99cb1ed7e9f5ddf7286cc262069817bd8138af0ee490"} Dec 05 17:35:37 crc kubenswrapper[4753]: I1205 17:35:37.745237 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"187c344a-5fdf-47db-b103-de9458e6a58a","Type":"ContainerStarted","Data":"6f8e577a54f1078d1cc7e578f58a733630ae4e0aca54ad262666a77e3b03ed52"} Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.464557 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.532699 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-run-httpd\") pod \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.532804 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-scripts\") pod \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.532859 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-config-data\") pod \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.532907 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-log-httpd\") pod \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.533002 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-sg-core-conf-yaml\") pod \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.533027 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-combined-ca-bundle\") pod \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.533135 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpqdq\" (UniqueName: \"kubernetes.io/projected/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-kube-api-access-zpqdq\") pod \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\" (UID: \"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2\") " Dec 05 17:35:38 crc kubenswrapper[4753]: 
I1205 17:35:38.536037 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" (UID: "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.536184 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" (UID: "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.551804 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-kube-api-access-zpqdq" (OuterVolumeSpecName: "kube-api-access-zpqdq") pod "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" (UID: "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2"). InnerVolumeSpecName "kube-api-access-zpqdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.557099 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-scripts" (OuterVolumeSpecName: "scripts") pod "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" (UID: "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.590239 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" (UID: "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.624933 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" (UID: "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.635851 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.635883 4753 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.635895 4753 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.635904 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.635913 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpqdq\" (UniqueName: \"kubernetes.io/projected/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-kube-api-access-zpqdq\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.635922 4753 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.668427 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-config-data" (OuterVolumeSpecName: "config-data") pod "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" (UID: "02bce72e-5cac-4c12-b1ba-e08f2b95cbe2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.738083 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.764218 4753 generic.go:334] "Generic (PLEG): container finished" podID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerID="67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303" exitCode=0 Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.764308 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerDied","Data":"67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303"} Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.764347 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"02bce72e-5cac-4c12-b1ba-e08f2b95cbe2","Type":"ContainerDied","Data":"1e58d87e769304950025932f88429079091a04eb29ceb1a9f1520002b144c11d"} Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.764401 4753 scope.go:117] "RemoveContainer" containerID="efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.764616 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.770043 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"187c344a-5fdf-47db-b103-de9458e6a58a","Type":"ContainerStarted","Data":"8aedf99212c455857a7e255b5bd7b037fecf0e5240ffd3eba68f1b106eebeabf"} Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.773910 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"33797bcf-e2f2-4a28-8148-3e027fc342d8","Type":"ContainerStarted","Data":"99f09c4778c4f8a9d6f495399c8cfe058440c83a5cc15c55945fdbcef9c52433"} Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.773952 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"33797bcf-e2f2-4a28-8148-3e027fc342d8","Type":"ContainerStarted","Data":"28b15f2587f8b7f86e35cb27c574c8031a34aabe2a53a98a6663c17ebe9b7c08"} Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.816171 4753 scope.go:117] "RemoveContainer" containerID="58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.841066 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.84104542 podStartE2EDuration="3.84104542s" podCreationTimestamp="2025-12-05 17:35:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:38.799420118 +0000 UTC m=+1877.302527124" watchObservedRunningTime="2025-12-05 17:35:38.84104542 +0000 UTC m=+1877.344152426" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.855320 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.855293625 podStartE2EDuration="4.855293625s" podCreationTimestamp="2025-12-05 17:35:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:38.836024678 +0000 UTC m=+1877.339131684" watchObservedRunningTime="2025-12-05 17:35:38.855293625 +0000 UTC m=+1877.358400631" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.894215 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.907069 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.937258 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:38 crc kubenswrapper[4753]: E1205 17:35:38.937695 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="sg-core" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.937712 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="sg-core" Dec 05 17:35:38 crc kubenswrapper[4753]: E1205 17:35:38.937725 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="ceilometer-notification-agent" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.937732 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" 
containerName="ceilometer-notification-agent" Dec 05 17:35:38 crc kubenswrapper[4753]: E1205 17:35:38.937755 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="proxy-httpd" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.937761 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="proxy-httpd" Dec 05 17:35:38 crc kubenswrapper[4753]: E1205 17:35:38.937786 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="ceilometer-central-agent" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.937792 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="ceilometer-central-agent" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.937980 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="sg-core" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.937997 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="ceilometer-central-agent" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.938010 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="ceilometer-notification-agent" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.938030 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" containerName="proxy-httpd" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.939972 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.941377 4753 scope.go:117] "RemoveContainer" containerID="9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.943472 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.944515 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:35:38 crc kubenswrapper[4753]: I1205 17:35:38.969521 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.016363 4753 scope.go:117] "RemoveContainer" containerID="67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.048288 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-log-httpd\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.048343 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.048529 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-run-httpd\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.048634 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.048684 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69rsw\" (UniqueName: \"kubernetes.io/projected/d50ea472-ae7e-45eb-b08c-f69dcb662af2-kube-api-access-69rsw\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.048709 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-scripts\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.048730 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-config-data\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 
17:35:39.053272 4753 scope.go:117] "RemoveContainer" containerID="efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e" Dec 05 17:35:39 crc kubenswrapper[4753]: E1205 17:35:39.053726 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e\": container with ID starting with efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e not found: ID does not exist" containerID="efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.053758 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e"} err="failed to get container status \"efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e\": rpc error: code = NotFound desc = could not find container \"efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e\": container with ID starting with efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e not found: ID does not exist" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.053798 4753 scope.go:117] "RemoveContainer" containerID="58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609" Dec 05 17:35:39 crc kubenswrapper[4753]: E1205 17:35:39.054007 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609\": container with ID starting with 58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609 not found: ID does not exist" containerID="58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.054047 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609"} err="failed to get container status \"58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609\": rpc error: code = NotFound desc = could not find container \"58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609\": container with ID starting with 58cdb6823e04d7abdb1c84054ca8e3c23b19166526c8b1baf3e245d69183d609 not found: ID does not exist" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.054061 4753 scope.go:117] "RemoveContainer" containerID="9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00" Dec 05 17:35:39 crc kubenswrapper[4753]: E1205 17:35:39.054289 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00\": container with ID starting with 9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00 not found: ID does not exist" containerID="9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.054327 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00"} err="failed to get container status \"9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00\": rpc error: code = NotFound desc = could not find container \"9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00\": container with ID 
starting with 9ff6f3d5edaee594599b021425b5445fbe7e30b656cc2d968504fb5c6c7dbc00 not found: ID does not exist" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.054340 4753 scope.go:117] "RemoveContainer" containerID="67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303" Dec 05 17:35:39 crc kubenswrapper[4753]: E1205 17:35:39.054539 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303\": container with ID starting with 67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303 not found: ID does not exist" containerID="67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.054597 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303"} err="failed to get container status \"67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303\": rpc error: code = NotFound desc = could not find container \"67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303\": container with ID starting with 67211f695f145a32745ef7a96d02f2c1a5f32b27c2adc3b25b1d2c7eb4612303 not found: ID does not exist" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.150050 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69rsw\" (UniqueName: \"kubernetes.io/projected/d50ea472-ae7e-45eb-b08c-f69dcb662af2-kube-api-access-69rsw\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.150104 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-scripts\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.150133 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-config-data\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.150242 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-log-httpd\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.150263 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.150299 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-run-httpd\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.150327 4753 reconciler_common.go:218] 
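Every "RemoveContainer" above draws a NotFound pair, because the containers vanished along with their sandbox; the kubelet logs the error and moves on, since the desired end state (container gone) already holds. A stdlib-only sketch of that idempotent-delete pattern; the runtime stub and sentinel error are stand-ins, not the real CRI client:

    package main

    import (
    	"errors"
    	"fmt"
    )

    // errNotFound stands in for the runtime's "NotFound" status seen in the log.
    var errNotFound = errors.New("NotFound: ID does not exist")

    // removeContainer deletes a container and swallows NotFound: if the runtime
    // no longer knows the ID, removal has effectively already succeeded, which
    // is why each error pair above is informational rather than fatal.
    func removeContainer(rm func(id string) error, id string) error {
    	err := rm(id)
    	if errors.Is(err, errNotFound) {
    		fmt.Printf("container %s already gone, treating removal as success\n", id)
    		return nil
    	}
    	return err
    }

    func main() {
    	// Stand-in runtime that has already lost the container, like the log above
    	// where the pod sandbox and its containers were deleted together.
    	rm := func(id string) error {
    		return fmt.Errorf("could not find container %q: %w", id, errNotFound)
    	}
    	if err := removeContainer(rm, "efcc53512ec04fe6e0bf8b43a570210a945873f6202b09f4d0d88b4e5f0dd03e"); err != nil {
    		fmt.Println("unexpected:", err)
    	}
    }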
"operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.151378 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-run-httpd\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.151551 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-log-httpd\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.158977 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.162561 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-config-data\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.163122 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-scripts\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.163699 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.167640 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69rsw\" (UniqueName: \"kubernetes.io/projected/d50ea472-ae7e-45eb-b08c-f69dcb662af2-kube-api-access-69rsw\") pod \"ceilometer-0\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.271781 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.398061 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-7rkhf" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.561457 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xxtx\" (UniqueName: \"kubernetes.io/projected/b33874b7-5966-44b0-9a9c-a555e52127b9-kube-api-access-4xxtx\") pod \"b33874b7-5966-44b0-9a9c-a555e52127b9\" (UID: \"b33874b7-5966-44b0-9a9c-a555e52127b9\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.562624 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b33874b7-5966-44b0-9a9c-a555e52127b9-operator-scripts\") pod \"b33874b7-5966-44b0-9a9c-a555e52127b9\" (UID: \"b33874b7-5966-44b0-9a9c-a555e52127b9\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.565311 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b33874b7-5966-44b0-9a9c-a555e52127b9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b33874b7-5966-44b0-9a9c-a555e52127b9" (UID: "b33874b7-5966-44b0-9a9c-a555e52127b9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.576757 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b33874b7-5966-44b0-9a9c-a555e52127b9-kube-api-access-4xxtx" (OuterVolumeSpecName: "kube-api-access-4xxtx") pod "b33874b7-5966-44b0-9a9c-a555e52127b9" (UID: "b33874b7-5966-44b0-9a9c-a555e52127b9"). InnerVolumeSpecName "kube-api-access-4xxtx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.586773 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xxtx\" (UniqueName: \"kubernetes.io/projected/b33874b7-5966-44b0-9a9c-a555e52127b9-kube-api-access-4xxtx\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.586819 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b33874b7-5966-44b0-9a9c-a555e52127b9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.653727 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ncw7r" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.696712 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-134b-account-create-update-qf8qh" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.719038 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-ae67-account-create-update-lxk95" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.731257 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-080f-account-create-update-hsplf" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.792868 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02bce72e-5cac-4c12-b1ba-e08f2b95cbe2" path="/var/lib/kubelet/pods/02bce72e-5cac-4c12-b1ba-e08f2b95cbe2/volumes" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.803549 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtvzp\" (UniqueName: \"kubernetes.io/projected/8b231cbb-8d95-4948-aba8-825809d77fa7-kube-api-access-dtvzp\") pod \"8b231cbb-8d95-4948-aba8-825809d77fa7\" (UID: \"8b231cbb-8d95-4948-aba8-825809d77fa7\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.803667 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b231cbb-8d95-4948-aba8-825809d77fa7-operator-scripts\") pod \"8b231cbb-8d95-4948-aba8-825809d77fa7\" (UID: \"8b231cbb-8d95-4948-aba8-825809d77fa7\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.804812 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b231cbb-8d95-4948-aba8-825809d77fa7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "8b231cbb-8d95-4948-aba8-825809d77fa7" (UID: "8b231cbb-8d95-4948-aba8-825809d77fa7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.811360 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-qvb5q" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.831421 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b231cbb-8d95-4948-aba8-825809d77fa7-kube-api-access-dtvzp" (OuterVolumeSpecName: "kube-api-access-dtvzp") pod "8b231cbb-8d95-4948-aba8-825809d77fa7" (UID: "8b231cbb-8d95-4948-aba8-825809d77fa7"). InnerVolumeSpecName "kube-api-access-dtvzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.859476 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-ncw7r" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.904613 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-7rkhf" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.908390 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmzlk\" (UniqueName: \"kubernetes.io/projected/d9465bae-5a66-4ab6-956b-1258eb08db35-kube-api-access-qmzlk\") pod \"d9465bae-5a66-4ab6-956b-1258eb08db35\" (UID: \"d9465bae-5a66-4ab6-956b-1258eb08db35\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.908497 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xrkj9\" (UniqueName: \"kubernetes.io/projected/762045ba-049a-471e-b6dc-ac74b0c28bfa-kube-api-access-xrkj9\") pod \"762045ba-049a-471e-b6dc-ac74b0c28bfa\" (UID: \"762045ba-049a-471e-b6dc-ac74b0c28bfa\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.908530 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlqt8\" (UniqueName: \"kubernetes.io/projected/2f79146b-e55c-4ccf-93b3-91829167768b-kube-api-access-qlqt8\") pod \"2f79146b-e55c-4ccf-93b3-91829167768b\" (UID: \"2f79146b-e55c-4ccf-93b3-91829167768b\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.908580 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f79146b-e55c-4ccf-93b3-91829167768b-operator-scripts\") pod \"2f79146b-e55c-4ccf-93b3-91829167768b\" (UID: \"2f79146b-e55c-4ccf-93b3-91829167768b\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.908604 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/762045ba-049a-471e-b6dc-ac74b0c28bfa-operator-scripts\") pod \"762045ba-049a-471e-b6dc-ac74b0c28bfa\" (UID: \"762045ba-049a-471e-b6dc-ac74b0c28bfa\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.908695 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzq7b\" (UniqueName: \"kubernetes.io/projected/de05c77d-b326-4c0c-996f-a78f35ae6694-kube-api-access-gzq7b\") pod \"de05c77d-b326-4c0c-996f-a78f35ae6694\" (UID: \"de05c77d-b326-4c0c-996f-a78f35ae6694\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.908767 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9465bae-5a66-4ab6-956b-1258eb08db35-operator-scripts\") pod \"d9465bae-5a66-4ab6-956b-1258eb08db35\" (UID: \"d9465bae-5a66-4ab6-956b-1258eb08db35\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.908805 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de05c77d-b326-4c0c-996f-a78f35ae6694-operator-scripts\") pod \"de05c77d-b326-4c0c-996f-a78f35ae6694\" (UID: \"de05c77d-b326-4c0c-996f-a78f35ae6694\") " Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.909424 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtvzp\" (UniqueName: \"kubernetes.io/projected/8b231cbb-8d95-4948-aba8-825809d77fa7-kube-api-access-dtvzp\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.909441 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/8b231cbb-8d95-4948-aba8-825809d77fa7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:39 crc 
kubenswrapper[4753]: I1205 17:35:39.923543 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/762045ba-049a-471e-b6dc-ac74b0c28bfa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "762045ba-049a-471e-b6dc-ac74b0c28bfa" (UID: "762045ba-049a-471e-b6dc-ac74b0c28bfa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.925502 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f79146b-e55c-4ccf-93b3-91829167768b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2f79146b-e55c-4ccf-93b3-91829167768b" (UID: "2f79146b-e55c-4ccf-93b3-91829167768b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.925891 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d9465bae-5a66-4ab6-956b-1258eb08db35-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d9465bae-5a66-4ab6-956b-1258eb08db35" (UID: "d9465bae-5a66-4ab6-956b-1258eb08db35"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.926218 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de05c77d-b326-4c0c-996f-a78f35ae6694-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "de05c77d-b326-4c0c-996f-a78f35ae6694" (UID: "de05c77d-b326-4c0c-996f-a78f35ae6694"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.934666 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9465bae-5a66-4ab6-956b-1258eb08db35-kube-api-access-qmzlk" (OuterVolumeSpecName: "kube-api-access-qmzlk") pod "d9465bae-5a66-4ab6-956b-1258eb08db35" (UID: "d9465bae-5a66-4ab6-956b-1258eb08db35"). InnerVolumeSpecName "kube-api-access-qmzlk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.951973 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-ae67-account-create-update-lxk95" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.954469 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de05c77d-b326-4c0c-996f-a78f35ae6694-kube-api-access-gzq7b" (OuterVolumeSpecName: "kube-api-access-gzq7b") pod "de05c77d-b326-4c0c-996f-a78f35ae6694" (UID: "de05c77d-b326-4c0c-996f-a78f35ae6694"). InnerVolumeSpecName "kube-api-access-gzq7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.954597 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f79146b-e55c-4ccf-93b3-91829167768b-kube-api-access-qlqt8" (OuterVolumeSpecName: "kube-api-access-qlqt8") pod "2f79146b-e55c-4ccf-93b3-91829167768b" (UID: "2f79146b-e55c-4ccf-93b3-91829167768b"). InnerVolumeSpecName "kube-api-access-qlqt8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.970025 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/762045ba-049a-471e-b6dc-ac74b0c28bfa-kube-api-access-xrkj9" (OuterVolumeSpecName: "kube-api-access-xrkj9") pod "762045ba-049a-471e-b6dc-ac74b0c28bfa" (UID: "762045ba-049a-471e-b6dc-ac74b0c28bfa"). InnerVolumeSpecName "kube-api-access-xrkj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:39 crc kubenswrapper[4753]: I1205 17:35:39.987225 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-qvb5q" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.006438 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-134b-account-create-update-qf8qh" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.016851 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzq7b\" (UniqueName: \"kubernetes.io/projected/de05c77d-b326-4c0c-996f-a78f35ae6694-kube-api-access-gzq7b\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.016886 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d9465bae-5a66-4ab6-956b-1258eb08db35-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.016900 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de05c77d-b326-4c0c-996f-a78f35ae6694-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.016911 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmzlk\" (UniqueName: \"kubernetes.io/projected/d9465bae-5a66-4ab6-956b-1258eb08db35-kube-api-access-qmzlk\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.016921 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xrkj9\" (UniqueName: \"kubernetes.io/projected/762045ba-049a-471e-b6dc-ac74b0c28bfa-kube-api-access-xrkj9\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.016931 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlqt8\" (UniqueName: \"kubernetes.io/projected/2f79146b-e55c-4ccf-93b3-91829167768b-kube-api-access-qlqt8\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.016941 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f79146b-e55c-4ccf-93b3-91829167768b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.016951 4753 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/762045ba-049a-471e-b6dc-ac74b0c28bfa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.042616 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-080f-account-create-update-hsplf" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064723 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-ncw7r" event={"ID":"8b231cbb-8d95-4948-aba8-825809d77fa7","Type":"ContainerDied","Data":"dcc835b972982ba9c07d11946943f849a262e7bb3dd532847b1c9149dcd8f1f3"} Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064816 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dcc835b972982ba9c07d11946943f849a262e7bb3dd532847b1c9149dcd8f1f3" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064834 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-7rkhf" event={"ID":"b33874b7-5966-44b0-9a9c-a555e52127b9","Type":"ContainerDied","Data":"64ae1a085a1278d3668e862e5f45daf58be6492d0a5512d9bc2806d3729a3d27"} Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064847 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="64ae1a085a1278d3668e862e5f45daf58be6492d0a5512d9bc2806d3729a3d27" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064854 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-ae67-account-create-update-lxk95" event={"ID":"d9465bae-5a66-4ab6-956b-1258eb08db35","Type":"ContainerDied","Data":"c053a2888000b4f0212cc48f8772f2bffde1d7e189c44d06ab17309d953eee68"} Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064863 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c053a2888000b4f0212cc48f8772f2bffde1d7e189c44d06ab17309d953eee68" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064872 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-qvb5q" event={"ID":"762045ba-049a-471e-b6dc-ac74b0c28bfa","Type":"ContainerDied","Data":"45a306a872fb9b6d223392f0bd6b05c479d7d90d9b427bd7e7f0b1bcea8fd1e4"} Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064881 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45a306a872fb9b6d223392f0bd6b05c479d7d90d9b427bd7e7f0b1bcea8fd1e4" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064923 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-134b-account-create-update-qf8qh" event={"ID":"de05c77d-b326-4c0c-996f-a78f35ae6694","Type":"ContainerDied","Data":"a71c865f17a2f1faf4cf2f18d727b1f459837025ce9e6f42fea141b14ccf54f5"} Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064935 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a71c865f17a2f1faf4cf2f18d727b1f459837025ce9e6f42fea141b14ccf54f5" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064943 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-080f-account-create-update-hsplf" event={"ID":"2f79146b-e55c-4ccf-93b3-91829167768b","Type":"ContainerDied","Data":"b4210f6662db0df12c722a895a87e8808b22ab34a1abb8faa0b89bff57f332a6"} Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.064955 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4210f6662db0df12c722a895a87e8808b22ab34a1abb8faa0b89bff57f332a6" Dec 05 17:35:40 crc kubenswrapper[4753]: I1205 17:35:40.445504 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:41 crc kubenswrapper[4753]: I1205 17:35:41.059374 4753 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ceilometer-0" event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerStarted","Data":"faff197016b8b63c1faade060af6489c5ecb434bf2be4324b8cd78a9c783c95a"} Dec 05 17:35:42 crc kubenswrapper[4753]: I1205 17:35:42.073333 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerStarted","Data":"f002ecf75d7df9729692cc36a71ab6d1e1cb8778fbd4d24ebf563bab2c8f5fbb"} Dec 05 17:35:42 crc kubenswrapper[4753]: I1205 17:35:42.720579 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:35:42 crc kubenswrapper[4753]: E1205 17:35:42.721165 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:35:43 crc kubenswrapper[4753]: I1205 17:35:43.087580 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerStarted","Data":"4e4528765c9d046af370996c1510c6447f560908171e1d7adb81bd34c10f0b96"} Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.102696 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerStarted","Data":"c8c9cb43224f3bbe31fe7cd8fd542ad904c54e1979cae0efc475a56eec8e0f17"} Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.712205 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z8vmq"] Dec 05 17:35:44 crc kubenswrapper[4753]: E1205 17:35:44.712847 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de05c77d-b326-4c0c-996f-a78f35ae6694" containerName="mariadb-account-create-update" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.712863 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="de05c77d-b326-4c0c-996f-a78f35ae6694" containerName="mariadb-account-create-update" Dec 05 17:35:44 crc kubenswrapper[4753]: E1205 17:35:44.712899 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="762045ba-049a-471e-b6dc-ac74b0c28bfa" containerName="mariadb-database-create" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.712906 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="762045ba-049a-471e-b6dc-ac74b0c28bfa" containerName="mariadb-database-create" Dec 05 17:35:44 crc kubenswrapper[4753]: E1205 17:35:44.712918 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b33874b7-5966-44b0-9a9c-a555e52127b9" containerName="mariadb-database-create" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.712925 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b33874b7-5966-44b0-9a9c-a555e52127b9" containerName="mariadb-database-create" Dec 05 17:35:44 crc kubenswrapper[4753]: E1205 17:35:44.712945 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9465bae-5a66-4ab6-956b-1258eb08db35" containerName="mariadb-account-create-update" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.712951 4753 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="d9465bae-5a66-4ab6-956b-1258eb08db35" containerName="mariadb-account-create-update" Dec 05 17:35:44 crc kubenswrapper[4753]: E1205 17:35:44.712962 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b231cbb-8d95-4948-aba8-825809d77fa7" containerName="mariadb-database-create" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.712967 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b231cbb-8d95-4948-aba8-825809d77fa7" containerName="mariadb-database-create" Dec 05 17:35:44 crc kubenswrapper[4753]: E1205 17:35:44.712993 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f79146b-e55c-4ccf-93b3-91829167768b" containerName="mariadb-account-create-update" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.713000 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f79146b-e55c-4ccf-93b3-91829167768b" containerName="mariadb-account-create-update" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.713193 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b231cbb-8d95-4948-aba8-825809d77fa7" containerName="mariadb-database-create" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.713208 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9465bae-5a66-4ab6-956b-1258eb08db35" containerName="mariadb-account-create-update" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.713216 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="b33874b7-5966-44b0-9a9c-a555e52127b9" containerName="mariadb-database-create" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.713230 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="762045ba-049a-471e-b6dc-ac74b0c28bfa" containerName="mariadb-database-create" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.713240 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f79146b-e55c-4ccf-93b3-91829167768b" containerName="mariadb-account-create-update" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.713263 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="de05c77d-b326-4c0c-996f-a78f35ae6694" containerName="mariadb-account-create-update" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.713949 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.716075 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.716250 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-blgqz" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.716998 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.728215 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z8vmq"] Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.737124 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.737252 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh2v4\" (UniqueName: \"kubernetes.io/projected/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-kube-api-access-gh2v4\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.737290 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-config-data\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.737319 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-scripts\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.838854 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-config-data\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.839261 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-scripts\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.839477 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: 
\"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.839574 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh2v4\" (UniqueName: \"kubernetes.io/projected/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-kube-api-access-gh2v4\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.848291 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-config-data\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.848741 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-scripts\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.848874 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:44 crc kubenswrapper[4753]: I1205 17:35:44.856990 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh2v4\" (UniqueName: \"kubernetes.io/projected/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-kube-api-access-gh2v4\") pod \"nova-cell0-conductor-db-sync-z8vmq\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") " pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.034267 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-z8vmq" Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.166811 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerStarted","Data":"3986fea860763f752ccc43dcf4d9359867d317545f3a7426a025165bd75cff38"} Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.167289 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.193261 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.360327261 podStartE2EDuration="7.193241254s" podCreationTimestamp="2025-12-05 17:35:38 +0000 UTC" firstStartedPulling="2025-12-05 17:35:40.447796265 +0000 UTC m=+1878.950903271" lastFinishedPulling="2025-12-05 17:35:44.280710248 +0000 UTC m=+1882.783817264" observedRunningTime="2025-12-05 17:35:45.190794924 +0000 UTC m=+1883.693901930" watchObservedRunningTime="2025-12-05 17:35:45.193241254 +0000 UTC m=+1883.696348250" Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.207909 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.207956 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.248412 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.277212 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.369735 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:45 crc kubenswrapper[4753]: I1205 17:35:45.670608 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z8vmq"] Dec 05 17:35:46 crc kubenswrapper[4753]: I1205 17:35:46.179703 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-z8vmq" event={"ID":"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b","Type":"ContainerStarted","Data":"7e1540449dc06fc739461889f36cdec616af3afa14d8efee6f79ad1a9d11be41"} Dec 05 17:35:46 crc kubenswrapper[4753]: I1205 17:35:46.180125 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 17:35:46 crc kubenswrapper[4753]: I1205 17:35:46.180174 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 17:35:46 crc kubenswrapper[4753]: I1205 17:35:46.405122 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:46 crc kubenswrapper[4753]: I1205 17:35:46.405478 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:46 crc kubenswrapper[4753]: I1205 17:35:46.473742 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:46 crc kubenswrapper[4753]: I1205 17:35:46.499914 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:46 crc kubenswrapper[4753]: I1205 17:35:46.956322 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Dec 05 17:35:47 crc kubenswrapper[4753]: I1205 17:35:47.189465 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="ceilometer-central-agent" containerID="cri-o://f002ecf75d7df9729692cc36a71ab6d1e1cb8778fbd4d24ebf563bab2c8f5fbb" gracePeriod=30 Dec 05 17:35:47 crc kubenswrapper[4753]: I1205 17:35:47.189899 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="proxy-httpd" containerID="cri-o://3986fea860763f752ccc43dcf4d9359867d317545f3a7426a025165bd75cff38" gracePeriod=30 Dec 05 17:35:47 crc kubenswrapper[4753]: I1205 17:35:47.189939 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="sg-core" containerID="cri-o://c8c9cb43224f3bbe31fe7cd8fd542ad904c54e1979cae0efc475a56eec8e0f17" gracePeriod=30 Dec 05 17:35:47 crc kubenswrapper[4753]: I1205 17:35:47.189966 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="ceilometer-notification-agent" containerID="cri-o://4e4528765c9d046af370996c1510c6447f560908171e1d7adb81bd34c10f0b96" gracePeriod=30 Dec 05 17:35:47 crc kubenswrapper[4753]: I1205 17:35:47.190598 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:47 crc kubenswrapper[4753]: I1205 17:35:47.190777 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:48 crc kubenswrapper[4753]: I1205 17:35:48.204858 4753 generic.go:334] "Generic (PLEG): container finished" podID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerID="3986fea860763f752ccc43dcf4d9359867d317545f3a7426a025165bd75cff38" exitCode=0 Dec 05 17:35:48 crc kubenswrapper[4753]: I1205 17:35:48.205550 4753 generic.go:334] "Generic (PLEG): container finished" podID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerID="c8c9cb43224f3bbe31fe7cd8fd542ad904c54e1979cae0efc475a56eec8e0f17" exitCode=2 Dec 05 17:35:48 crc kubenswrapper[4753]: I1205 17:35:48.205565 4753 generic.go:334] "Generic (PLEG): container finished" podID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerID="4e4528765c9d046af370996c1510c6447f560908171e1d7adb81bd34c10f0b96" exitCode=0 Dec 05 17:35:48 crc kubenswrapper[4753]: I1205 17:35:48.205063 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerDied","Data":"3986fea860763f752ccc43dcf4d9359867d317545f3a7426a025165bd75cff38"} Dec 05 17:35:48 crc kubenswrapper[4753]: I1205 17:35:48.205675 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerDied","Data":"c8c9cb43224f3bbe31fe7cd8fd542ad904c54e1979cae0efc475a56eec8e0f17"} Dec 05 17:35:48 crc kubenswrapper[4753]: I1205 17:35:48.205692 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerDied","Data":"4e4528765c9d046af370996c1510c6447f560908171e1d7adb81bd34c10f0b96"} Dec 05 17:35:49 crc kubenswrapper[4753]: I1205 17:35:49.214454 4753 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:35:49 crc kubenswrapper[4753]: I1205 17:35:49.214492 4753 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:35:49 crc kubenswrapper[4753]: I1205 17:35:49.945540 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 17:35:49 crc kubenswrapper[4753]: I1205 17:35:49.946250 4753 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:35:50 crc kubenswrapper[4753]: I1205 17:35:50.021107 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 17:35:50 crc kubenswrapper[4753]: I1205 17:35:50.026314 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:50 crc kubenswrapper[4753]: I1205 17:35:50.038972 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 17:35:54 crc kubenswrapper[4753]: I1205 17:35:54.281280 4753 generic.go:334] "Generic (PLEG): container finished" podID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerID="f002ecf75d7df9729692cc36a71ab6d1e1cb8778fbd4d24ebf563bab2c8f5fbb" exitCode=0 Dec 05 17:35:54 crc kubenswrapper[4753]: I1205 17:35:54.281377 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerDied","Data":"f002ecf75d7df9729692cc36a71ab6d1e1cb8778fbd4d24ebf563bab2c8f5fbb"} Dec 05 17:35:54 crc kubenswrapper[4753]: I1205 17:35:54.720727 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:35:54 crc kubenswrapper[4753]: E1205 17:35:54.721034 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.780351 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.852403 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-log-httpd\") pod \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.852457 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-config-data\") pod \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.852551 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69rsw\" (UniqueName: \"kubernetes.io/projected/d50ea472-ae7e-45eb-b08c-f69dcb662af2-kube-api-access-69rsw\") pod \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.852652 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-combined-ca-bundle\") pod \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.852683 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-run-httpd\") pod \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.852722 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-sg-core-conf-yaml\") pod \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.852778 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-scripts\") pod \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\" (UID: \"d50ea472-ae7e-45eb-b08c-f69dcb662af2\") " Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.852997 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d50ea472-ae7e-45eb-b08c-f69dcb662af2" (UID: "d50ea472-ae7e-45eb-b08c-f69dcb662af2"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.853475 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d50ea472-ae7e-45eb-b08c-f69dcb662af2" (UID: "d50ea472-ae7e-45eb-b08c-f69dcb662af2"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.854174 4753 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.854198 4753 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d50ea472-ae7e-45eb-b08c-f69dcb662af2-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.857621 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d50ea472-ae7e-45eb-b08c-f69dcb662af2-kube-api-access-69rsw" (OuterVolumeSpecName: "kube-api-access-69rsw") pod "d50ea472-ae7e-45eb-b08c-f69dcb662af2" (UID: "d50ea472-ae7e-45eb-b08c-f69dcb662af2"). InnerVolumeSpecName "kube-api-access-69rsw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.859168 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-scripts" (OuterVolumeSpecName: "scripts") pod "d50ea472-ae7e-45eb-b08c-f69dcb662af2" (UID: "d50ea472-ae7e-45eb-b08c-f69dcb662af2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.890105 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d50ea472-ae7e-45eb-b08c-f69dcb662af2" (UID: "d50ea472-ae7e-45eb-b08c-f69dcb662af2"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.956581 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69rsw\" (UniqueName: \"kubernetes.io/projected/d50ea472-ae7e-45eb-b08c-f69dcb662af2-kube-api-access-69rsw\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.956612 4753 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.956625 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.961013 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d50ea472-ae7e-45eb-b08c-f69dcb662af2" (UID: "d50ea472-ae7e-45eb-b08c-f69dcb662af2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:57 crc kubenswrapper[4753]: I1205 17:35:57.980572 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-config-data" (OuterVolumeSpecName: "config-data") pod "d50ea472-ae7e-45eb-b08c-f69dcb662af2" (UID: "d50ea472-ae7e-45eb-b08c-f69dcb662af2"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.058170 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.058204 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d50ea472-ae7e-45eb-b08c-f69dcb662af2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.344844 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-z8vmq" event={"ID":"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b","Type":"ContainerStarted","Data":"3f192e10789b7558bdbed59f6ef14b0147358717debd0e32f57ce049fc9cff50"} Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.350279 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d50ea472-ae7e-45eb-b08c-f69dcb662af2","Type":"ContainerDied","Data":"faff197016b8b63c1faade060af6489c5ecb434bf2be4324b8cd78a9c783c95a"} Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.350369 4753 scope.go:117] "RemoveContainer" containerID="3986fea860763f752ccc43dcf4d9359867d317545f3a7426a025165bd75cff38" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.350388 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.388960 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-z8vmq" podStartSLOduration=2.588675109 podStartE2EDuration="14.388936859s" podCreationTimestamp="2025-12-05 17:35:44 +0000 UTC" firstStartedPulling="2025-12-05 17:35:45.658953625 +0000 UTC m=+1884.162060631" lastFinishedPulling="2025-12-05 17:35:57.459215365 +0000 UTC m=+1895.962322381" observedRunningTime="2025-12-05 17:35:58.387930361 +0000 UTC m=+1896.891037397" watchObservedRunningTime="2025-12-05 17:35:58.388936859 +0000 UTC m=+1896.892043865" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.392998 4753 scope.go:117] "RemoveContainer" containerID="c8c9cb43224f3bbe31fe7cd8fd542ad904c54e1979cae0efc475a56eec8e0f17" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.479796 4753 scope.go:117] "RemoveContainer" containerID="4e4528765c9d046af370996c1510c6447f560908171e1d7adb81bd34c10f0b96" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.494666 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.507642 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.533125 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:58 crc kubenswrapper[4753]: E1205 17:35:58.533657 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="ceilometer-central-agent" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.533676 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="ceilometer-central-agent" Dec 05 17:35:58 crc kubenswrapper[4753]: E1205 17:35:58.533702 4753 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="ceilometer-notification-agent" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.533710 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="ceilometer-notification-agent" Dec 05 17:35:58 crc kubenswrapper[4753]: E1205 17:35:58.533725 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="proxy-httpd" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.533733 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="proxy-httpd" Dec 05 17:35:58 crc kubenswrapper[4753]: E1205 17:35:58.533759 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="sg-core" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.533766 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="sg-core" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.534001 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="ceilometer-notification-agent" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.534034 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="ceilometer-central-agent" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.534050 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="proxy-httpd" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.534062 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" containerName="sg-core" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.536373 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.537400 4753 scope.go:117] "RemoveContainer" containerID="f002ecf75d7df9729692cc36a71ab6d1e1cb8778fbd4d24ebf563bab2c8f5fbb" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.539574 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.539813 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.553162 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.572029 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-log-httpd\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.572202 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.572233 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p47bs\" (UniqueName: \"kubernetes.io/projected/845f8129-9938-466e-97b0-7d9f543e2550-kube-api-access-p47bs\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.572278 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.572355 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-run-httpd\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.572390 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-scripts\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.572418 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-config-data\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.674025 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.674079 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p47bs\" (UniqueName: \"kubernetes.io/projected/845f8129-9938-466e-97b0-7d9f543e2550-kube-api-access-p47bs\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.674122 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.674200 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-run-httpd\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.674231 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-scripts\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.674251 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-config-data\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.674293 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-log-httpd\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.674775 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-log-httpd\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.675112 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-run-httpd\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.680598 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.681038 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-config-data\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.685936 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.688374 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-scripts\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.695206 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p47bs\" (UniqueName: \"kubernetes.io/projected/845f8129-9938-466e-97b0-7d9f543e2550-kube-api-access-p47bs\") pod \"ceilometer-0\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " pod="openstack/ceilometer-0" Dec 05 17:35:58 crc kubenswrapper[4753]: I1205 17:35:58.879523 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:35:59 crc kubenswrapper[4753]: I1205 17:35:59.455218 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:35:59 crc kubenswrapper[4753]: I1205 17:35:59.734894 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d50ea472-ae7e-45eb-b08c-f69dcb662af2" path="/var/lib/kubelet/pods/d50ea472-ae7e-45eb-b08c-f69dcb662af2/volumes" Dec 05 17:36:00 crc kubenswrapper[4753]: I1205 17:36:00.379470 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerStarted","Data":"4b9fb2b4f2010e35837ba67636a32951274452bdc063bf803a8d53a1cf51f373"} Dec 05 17:36:00 crc kubenswrapper[4753]: I1205 17:36:00.379835 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerStarted","Data":"65f1a541aa59836266bbdf7556d4099c91e1f4a1ae45fafa265afa597e330a1c"} Dec 05 17:36:01 crc kubenswrapper[4753]: I1205 17:36:01.392550 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerStarted","Data":"15d22364dd7fae564b8cf65f5e227c20e8a31f065fdfb27a20746185026ca508"} Dec 05 17:36:02 crc kubenswrapper[4753]: I1205 17:36:02.411660 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerStarted","Data":"d9d9921fbb8be6eb6483ed171caed7437d62b4b41cc48e60278296fdbcb16d4c"} Dec 05 17:36:03 crc kubenswrapper[4753]: I1205 17:36:03.332446 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:03 crc kubenswrapper[4753]: I1205 17:36:03.423304 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerStarted","Data":"61214eff942b4b978be3d8c47ea8c8d87c4cf7770e784621e5c97bc0d55cdaa8"} Dec 05 17:36:03 crc kubenswrapper[4753]: I1205 17:36:03.423495 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/ceilometer-0" Dec 05 17:36:03 crc kubenswrapper[4753]: I1205 17:36:03.452299 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.523410813 podStartE2EDuration="5.452270473s" podCreationTimestamp="2025-12-05 17:35:58 +0000 UTC" firstStartedPulling="2025-12-05 17:35:59.463245617 +0000 UTC m=+1897.966352623" lastFinishedPulling="2025-12-05 17:36:02.392105257 +0000 UTC m=+1900.895212283" observedRunningTime="2025-12-05 17:36:03.445073589 +0000 UTC m=+1901.948180595" watchObservedRunningTime="2025-12-05 17:36:03.452270473 +0000 UTC m=+1901.955377489" Dec 05 17:36:04 crc kubenswrapper[4753]: I1205 17:36:04.435943 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="ceilometer-central-agent" containerID="cri-o://4b9fb2b4f2010e35837ba67636a32951274452bdc063bf803a8d53a1cf51f373" gracePeriod=30 Dec 05 17:36:04 crc kubenswrapper[4753]: I1205 17:36:04.436006 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="sg-core" containerID="cri-o://d9d9921fbb8be6eb6483ed171caed7437d62b4b41cc48e60278296fdbcb16d4c" gracePeriod=30 Dec 05 17:36:04 crc kubenswrapper[4753]: I1205 17:36:04.435999 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="proxy-httpd" containerID="cri-o://61214eff942b4b978be3d8c47ea8c8d87c4cf7770e784621e5c97bc0d55cdaa8" gracePeriod=30 Dec 05 17:36:04 crc kubenswrapper[4753]: I1205 17:36:04.436052 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="ceilometer-notification-agent" containerID="cri-o://15d22364dd7fae564b8cf65f5e227c20e8a31f065fdfb27a20746185026ca508" gracePeriod=30 Dec 05 17:36:05 crc kubenswrapper[4753]: I1205 17:36:05.449704 4753 generic.go:334] "Generic (PLEG): container finished" podID="845f8129-9938-466e-97b0-7d9f543e2550" containerID="61214eff942b4b978be3d8c47ea8c8d87c4cf7770e784621e5c97bc0d55cdaa8" exitCode=0 Dec 05 17:36:05 crc kubenswrapper[4753]: I1205 17:36:05.450120 4753 generic.go:334] "Generic (PLEG): container finished" podID="845f8129-9938-466e-97b0-7d9f543e2550" containerID="d9d9921fbb8be6eb6483ed171caed7437d62b4b41cc48e60278296fdbcb16d4c" exitCode=2 Dec 05 17:36:05 crc kubenswrapper[4753]: I1205 17:36:05.450137 4753 generic.go:334] "Generic (PLEG): container finished" podID="845f8129-9938-466e-97b0-7d9f543e2550" containerID="15d22364dd7fae564b8cf65f5e227c20e8a31f065fdfb27a20746185026ca508" exitCode=0 Dec 05 17:36:05 crc kubenswrapper[4753]: I1205 17:36:05.449778 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerDied","Data":"61214eff942b4b978be3d8c47ea8c8d87c4cf7770e784621e5c97bc0d55cdaa8"} Dec 05 17:36:05 crc kubenswrapper[4753]: I1205 17:36:05.450219 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerDied","Data":"d9d9921fbb8be6eb6483ed171caed7437d62b4b41cc48e60278296fdbcb16d4c"} Dec 05 17:36:05 crc kubenswrapper[4753]: I1205 17:36:05.450245 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerDied","Data":"15d22364dd7fae564b8cf65f5e227c20e8a31f065fdfb27a20746185026ca508"} Dec 05 17:36:06 crc kubenswrapper[4753]: I1205 17:36:06.721190 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:36:06 crc kubenswrapper[4753]: E1205 17:36:06.721956 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.479833 4753 generic.go:334] "Generic (PLEG): container finished" podID="845f8129-9938-466e-97b0-7d9f543e2550" containerID="4b9fb2b4f2010e35837ba67636a32951274452bdc063bf803a8d53a1cf51f373" exitCode=0 Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.480039 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerDied","Data":"4b9fb2b4f2010e35837ba67636a32951274452bdc063bf803a8d53a1cf51f373"} Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.480165 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"845f8129-9938-466e-97b0-7d9f543e2550","Type":"ContainerDied","Data":"65f1a541aa59836266bbdf7556d4099c91e1f4a1ae45fafa265afa597e330a1c"} Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.480183 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65f1a541aa59836266bbdf7556d4099c91e1f4a1ae45fafa265afa597e330a1c" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.490174 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.576391 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p47bs\" (UniqueName: \"kubernetes.io/projected/845f8129-9938-466e-97b0-7d9f543e2550-kube-api-access-p47bs\") pod \"845f8129-9938-466e-97b0-7d9f543e2550\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.576477 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-run-httpd\") pod \"845f8129-9938-466e-97b0-7d9f543e2550\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.576622 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-combined-ca-bundle\") pod \"845f8129-9938-466e-97b0-7d9f543e2550\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.576655 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-sg-core-conf-yaml\") pod \"845f8129-9938-466e-97b0-7d9f543e2550\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.577105 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "845f8129-9938-466e-97b0-7d9f543e2550" (UID: "845f8129-9938-466e-97b0-7d9f543e2550"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.577514 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-log-httpd\") pod \"845f8129-9938-466e-97b0-7d9f543e2550\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.577587 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-scripts\") pod \"845f8129-9938-466e-97b0-7d9f543e2550\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.577613 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-config-data\") pod \"845f8129-9938-466e-97b0-7d9f543e2550\" (UID: \"845f8129-9938-466e-97b0-7d9f543e2550\") " Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.577766 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "845f8129-9938-466e-97b0-7d9f543e2550" (UID: "845f8129-9938-466e-97b0-7d9f543e2550"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.578630 4753 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.578657 4753 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/845f8129-9938-466e-97b0-7d9f543e2550-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.594628 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/845f8129-9938-466e-97b0-7d9f543e2550-kube-api-access-p47bs" (OuterVolumeSpecName: "kube-api-access-p47bs") pod "845f8129-9938-466e-97b0-7d9f543e2550" (UID: "845f8129-9938-466e-97b0-7d9f543e2550"). InnerVolumeSpecName "kube-api-access-p47bs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.599911 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-scripts" (OuterVolumeSpecName: "scripts") pod "845f8129-9938-466e-97b0-7d9f543e2550" (UID: "845f8129-9938-466e-97b0-7d9f543e2550"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.654981 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "845f8129-9938-466e-97b0-7d9f543e2550" (UID: "845f8129-9938-466e-97b0-7d9f543e2550"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.678291 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "845f8129-9938-466e-97b0-7d9f543e2550" (UID: "845f8129-9938-466e-97b0-7d9f543e2550"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.680545 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.680567 4753 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.680577 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.680587 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p47bs\" (UniqueName: \"kubernetes.io/projected/845f8129-9938-466e-97b0-7d9f543e2550-kube-api-access-p47bs\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.699233 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-config-data" (OuterVolumeSpecName: "config-data") pod "845f8129-9938-466e-97b0-7d9f543e2550" (UID: "845f8129-9938-466e-97b0-7d9f543e2550"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:07 crc kubenswrapper[4753]: I1205 17:36:07.782195 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/845f8129-9938-466e-97b0-7d9f543e2550-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.492574 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.522974 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.546770 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.562949 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:08 crc kubenswrapper[4753]: E1205 17:36:08.563598 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="ceilometer-notification-agent" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.563628 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="ceilometer-notification-agent" Dec 05 17:36:08 crc kubenswrapper[4753]: E1205 17:36:08.563653 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="proxy-httpd" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.563661 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="proxy-httpd" Dec 05 17:36:08 crc kubenswrapper[4753]: E1205 17:36:08.563678 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="ceilometer-central-agent" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.563686 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="ceilometer-central-agent" Dec 05 17:36:08 crc kubenswrapper[4753]: E1205 17:36:08.563701 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="sg-core" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.563708 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="sg-core" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.563945 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="ceilometer-notification-agent" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.563971 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="sg-core" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.563996 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="ceilometer-central-agent" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.564013 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="845f8129-9938-466e-97b0-7d9f543e2550" containerName="proxy-httpd" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.568775 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.592987 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.593090 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.611018 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-scripts\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.611414 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.611474 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-run-httpd\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.611515 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.611579 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b6n9n\" (UniqueName: \"kubernetes.io/projected/1efbaacd-213c-469a-8bee-1515c6eca23f-kube-api-access-b6n9n\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.611619 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-log-httpd\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.611649 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-config-data\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.620247 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.713067 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-log-httpd\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.713122 4753 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-config-data\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.713220 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-scripts\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.713325 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.713363 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-run-httpd\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.713383 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.713425 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b6n9n\" (UniqueName: \"kubernetes.io/projected/1efbaacd-213c-469a-8bee-1515c6eca23f-kube-api-access-b6n9n\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.714342 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-log-httpd\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.715458 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-run-httpd\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.718496 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-scripts\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.718782 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.719504 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-config-data\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.720275 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.745223 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b6n9n\" (UniqueName: \"kubernetes.io/projected/1efbaacd-213c-469a-8bee-1515c6eca23f-kube-api-access-b6n9n\") pod \"ceilometer-0\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " pod="openstack/ceilometer-0" Dec 05 17:36:08 crc kubenswrapper[4753]: I1205 17:36:08.918980 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:36:09 crc kubenswrapper[4753]: I1205 17:36:09.424326 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:09 crc kubenswrapper[4753]: I1205 17:36:09.504403 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerStarted","Data":"ae1e1046a247edf08f74fb48cbdac4ed1ebe196a426c9febc1ac1649297f1ebc"} Dec 05 17:36:09 crc kubenswrapper[4753]: I1205 17:36:09.743666 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="845f8129-9938-466e-97b0-7d9f543e2550" path="/var/lib/kubelet/pods/845f8129-9938-466e-97b0-7d9f543e2550/volumes" Dec 05 17:36:09 crc kubenswrapper[4753]: I1205 17:36:09.927321 4753 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","podb33874b7-5966-44b0-9a9c-a555e52127b9"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort podb33874b7-5966-44b0-9a9c-a555e52127b9] : Timed out while waiting for systemd to remove kubepods-besteffort-podb33874b7_5966_44b0_9a9c_a555e52127b9.slice" Dec 05 17:36:10 crc kubenswrapper[4753]: I1205 17:36:10.516286 4753 generic.go:334] "Generic (PLEG): container finished" podID="903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b" containerID="3f192e10789b7558bdbed59f6ef14b0147358717debd0e32f57ce049fc9cff50" exitCode=0 Dec 05 17:36:10 crc kubenswrapper[4753]: I1205 17:36:10.516340 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-z8vmq" event={"ID":"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b","Type":"ContainerDied","Data":"3f192e10789b7558bdbed59f6ef14b0147358717debd0e32f57ce049fc9cff50"} Dec 05 17:36:10 crc kubenswrapper[4753]: I1205 17:36:10.519650 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerStarted","Data":"6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713"} Dec 05 17:36:10 crc kubenswrapper[4753]: I1205 17:36:10.668788 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:11 crc kubenswrapper[4753]: I1205 17:36:11.531268 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerStarted","Data":"09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833"} Dec 
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.004434 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-z8vmq"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.083914 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh2v4\" (UniqueName: \"kubernetes.io/projected/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-kube-api-access-gh2v4\") pod \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") "
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.084040 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-config-data\") pod \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") "
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.084087 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-scripts\") pod \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") "
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.084106 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-combined-ca-bundle\") pod \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\" (UID: \"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b\") "
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.089651 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-scripts" (OuterVolumeSpecName: "scripts") pod "903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b" (UID: "903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.103551 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-kube-api-access-gh2v4" (OuterVolumeSpecName: "kube-api-access-gh2v4") pod "903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b" (UID: "903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b"). InnerVolumeSpecName "kube-api-access-gh2v4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.117566 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b" (UID: "903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.128172 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-config-data" (OuterVolumeSpecName: "config-data") pod "903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b" (UID: "903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.186160 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.186194 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.186203 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.186216 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh2v4\" (UniqueName: \"kubernetes.io/projected/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b-kube-api-access-gh2v4\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.571422 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-z8vmq" event={"ID":"903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b","Type":"ContainerDied","Data":"7e1540449dc06fc739461889f36cdec616af3afa14d8efee6f79ad1a9d11be41"}
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.571470 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e1540449dc06fc739461889f36cdec616af3afa14d8efee6f79ad1a9d11be41"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.571555 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-z8vmq"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.657620 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 05 17:36:12 crc kubenswrapper[4753]: E1205 17:36:12.658454 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b" containerName="nova-cell0-conductor-db-sync"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.658626 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b" containerName="nova-cell0-conductor-db-sync"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.659023 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b" containerName="nova-cell0-conductor-db-sync"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.660202 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.662830 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-blgqz"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.663064 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.672748 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.714955 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldqsc\" (UniqueName: \"kubernetes.io/projected/0295546c-bece-4b19-ae35-8188830dab3b-kube-api-access-ldqsc\") pod \"nova-cell0-conductor-0\" (UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.715287 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0295546c-bece-4b19-ae35-8188830dab3b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.715372 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0295546c-bece-4b19-ae35-8188830dab3b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.816987 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldqsc\" (UniqueName: \"kubernetes.io/projected/0295546c-bece-4b19-ae35-8188830dab3b-kube-api-access-ldqsc\") pod \"nova-cell0-conductor-0\" (UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.817069 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0295546c-bece-4b19-ae35-8188830dab3b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.817214 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0295546c-bece-4b19-ae35-8188830dab3b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.821442 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0295546c-bece-4b19-ae35-8188830dab3b-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0"
Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.821876 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0295546c-bece-4b19-ae35-8188830dab3b-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0"
(UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:36:12 crc kubenswrapper[4753]: I1205 17:36:12.836796 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldqsc\" (UniqueName: \"kubernetes.io/projected/0295546c-bece-4b19-ae35-8188830dab3b-kube-api-access-ldqsc\") pod \"nova-cell0-conductor-0\" (UID: \"0295546c-bece-4b19-ae35-8188830dab3b\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:36:13 crc kubenswrapper[4753]: I1205 17:36:13.001193 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 17:36:13 crc kubenswrapper[4753]: W1205 17:36:13.523354 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0295546c_bece_4b19_ae35_8188830dab3b.slice/crio-8aa9177ea87a4606aef2bb7d3f3bc57a830828581101e7dbfbf5cc114a3fa8e7 WatchSource:0}: Error finding container 8aa9177ea87a4606aef2bb7d3f3bc57a830828581101e7dbfbf5cc114a3fa8e7: Status 404 returned error can't find the container with id 8aa9177ea87a4606aef2bb7d3f3bc57a830828581101e7dbfbf5cc114a3fa8e7 Dec 05 17:36:13 crc kubenswrapper[4753]: I1205 17:36:13.526885 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 17:36:13 crc kubenswrapper[4753]: I1205 17:36:13.596303 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerStarted","Data":"ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0"} Dec 05 17:36:13 crc kubenswrapper[4753]: I1205 17:36:13.603034 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0295546c-bece-4b19-ae35-8188830dab3b","Type":"ContainerStarted","Data":"8aa9177ea87a4606aef2bb7d3f3bc57a830828581101e7dbfbf5cc114a3fa8e7"} Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.634826 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"0295546c-bece-4b19-ae35-8188830dab3b","Type":"ContainerStarted","Data":"9004da8fd891630f105bc07b7bce4e977f9bb9e4c6dc5eccb9be4ea481c9d79c"} Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.636702 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.658047 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerStarted","Data":"b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b"} Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.658295 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.658314 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="ceilometer-central-agent" containerID="cri-o://6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713" gracePeriod=30 Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.658370 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="proxy-httpd" 
containerID="cri-o://b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b" gracePeriod=30 Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.658488 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="ceilometer-notification-agent" containerID="cri-o://09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833" gracePeriod=30 Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.658632 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="sg-core" containerID="cri-o://ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0" gracePeriod=30 Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.697741 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.6977131229999998 podStartE2EDuration="2.697713123s" podCreationTimestamp="2025-12-05 17:36:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:14.655441753 +0000 UTC m=+1913.158548789" watchObservedRunningTime="2025-12-05 17:36:14.697713123 +0000 UTC m=+1913.200820169" Dec 05 17:36:14 crc kubenswrapper[4753]: I1205 17:36:14.698670 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.36070765 podStartE2EDuration="6.69866016s" podCreationTimestamp="2025-12-05 17:36:08 +0000 UTC" firstStartedPulling="2025-12-05 17:36:09.412789249 +0000 UTC m=+1907.915896255" lastFinishedPulling="2025-12-05 17:36:13.750741769 +0000 UTC m=+1912.253848765" observedRunningTime="2025-12-05 17:36:14.681108312 +0000 UTC m=+1913.184215358" watchObservedRunningTime="2025-12-05 17:36:14.69866016 +0000 UTC m=+1913.201767206" Dec 05 17:36:15 crc kubenswrapper[4753]: I1205 17:36:15.680489 4753 generic.go:334] "Generic (PLEG): container finished" podID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerID="b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b" exitCode=0 Dec 05 17:36:15 crc kubenswrapper[4753]: I1205 17:36:15.680907 4753 generic.go:334] "Generic (PLEG): container finished" podID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerID="ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0" exitCode=2 Dec 05 17:36:15 crc kubenswrapper[4753]: I1205 17:36:15.680958 4753 generic.go:334] "Generic (PLEG): container finished" podID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerID="09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833" exitCode=0 Dec 05 17:36:15 crc kubenswrapper[4753]: I1205 17:36:15.680557 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerDied","Data":"b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b"} Dec 05 17:36:15 crc kubenswrapper[4753]: I1205 17:36:15.681086 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerDied","Data":"ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0"} Dec 05 17:36:15 crc kubenswrapper[4753]: I1205 17:36:15.681124 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerDied","Data":"09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833"} Dec 05 17:36:16 crc kubenswrapper[4753]: E1205 17:36:16.850737 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1efbaacd_213c_469a_8bee_1515c6eca23f.slice/crio-6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.284790 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.430120 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-run-httpd\") pod \"1efbaacd-213c-469a-8bee-1515c6eca23f\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.430846 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-config-data\") pod \"1efbaacd-213c-469a-8bee-1515c6eca23f\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.431005 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-combined-ca-bundle\") pod \"1efbaacd-213c-469a-8bee-1515c6eca23f\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.430702 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1efbaacd-213c-469a-8bee-1515c6eca23f" (UID: "1efbaacd-213c-469a-8bee-1515c6eca23f"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.431262 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-scripts\") pod \"1efbaacd-213c-469a-8bee-1515c6eca23f\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.431465 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-log-httpd\") pod \"1efbaacd-213c-469a-8bee-1515c6eca23f\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.431525 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-sg-core-conf-yaml\") pod \"1efbaacd-213c-469a-8bee-1515c6eca23f\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.431552 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b6n9n\" (UniqueName: \"kubernetes.io/projected/1efbaacd-213c-469a-8bee-1515c6eca23f-kube-api-access-b6n9n\") pod \"1efbaacd-213c-469a-8bee-1515c6eca23f\" (UID: \"1efbaacd-213c-469a-8bee-1515c6eca23f\") " Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.431796 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1efbaacd-213c-469a-8bee-1515c6eca23f" (UID: "1efbaacd-213c-469a-8bee-1515c6eca23f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.432313 4753 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.432335 4753 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1efbaacd-213c-469a-8bee-1515c6eca23f-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.437512 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-scripts" (OuterVolumeSpecName: "scripts") pod "1efbaacd-213c-469a-8bee-1515c6eca23f" (UID: "1efbaacd-213c-469a-8bee-1515c6eca23f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.438339 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1efbaacd-213c-469a-8bee-1515c6eca23f-kube-api-access-b6n9n" (OuterVolumeSpecName: "kube-api-access-b6n9n") pod "1efbaacd-213c-469a-8bee-1515c6eca23f" (UID: "1efbaacd-213c-469a-8bee-1515c6eca23f"). InnerVolumeSpecName "kube-api-access-b6n9n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.467358 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1efbaacd-213c-469a-8bee-1515c6eca23f" (UID: "1efbaacd-213c-469a-8bee-1515c6eca23f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.534082 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.534132 4753 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.534183 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b6n9n\" (UniqueName: \"kubernetes.io/projected/1efbaacd-213c-469a-8bee-1515c6eca23f-kube-api-access-b6n9n\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.547736 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1efbaacd-213c-469a-8bee-1515c6eca23f" (UID: "1efbaacd-213c-469a-8bee-1515c6eca23f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.573743 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-config-data" (OuterVolumeSpecName: "config-data") pod "1efbaacd-213c-469a-8bee-1515c6eca23f" (UID: "1efbaacd-213c-469a-8bee-1515c6eca23f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.635795 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.635832 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1efbaacd-213c-469a-8bee-1515c6eca23f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.711413 4753 generic.go:334] "Generic (PLEG): container finished" podID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerID="6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713" exitCode=0 Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.711498 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.711493 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerDied","Data":"6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713"} Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.711753 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1efbaacd-213c-469a-8bee-1515c6eca23f","Type":"ContainerDied","Data":"ae1e1046a247edf08f74fb48cbdac4ed1ebe196a426c9febc1ac1649297f1ebc"} Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.711809 4753 scope.go:117] "RemoveContainer" containerID="b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.721607 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:36:17 crc kubenswrapper[4753]: E1205 17:36:17.722674 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.761287 4753 scope.go:117] "RemoveContainer" containerID="ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.763748 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.794033 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.806925 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:17 crc kubenswrapper[4753]: E1205 17:36:17.807499 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="proxy-httpd" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.807519 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="proxy-httpd" Dec 05 17:36:17 crc kubenswrapper[4753]: E1205 17:36:17.807536 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="ceilometer-central-agent" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.807547 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="ceilometer-central-agent" Dec 05 17:36:17 crc kubenswrapper[4753]: E1205 17:36:17.807559 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="ceilometer-notification-agent" Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.807568 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="ceilometer-notification-agent" Dec 05 17:36:17 crc kubenswrapper[4753]: E1205 17:36:17.807596 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="sg-core" Dec 05 
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.807605 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="sg-core"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.807841 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="ceilometer-central-agent"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.807861 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="ceilometer-notification-agent"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.807875 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="proxy-httpd"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.807898 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" containerName="sg-core"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.810358 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.821460 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.838040 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.841035 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.875869 4753 scope.go:117] "RemoveContainer" containerID="09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.900835 4753 scope.go:117] "RemoveContainer" containerID="6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.920694 4753 scope.go:117] "RemoveContainer" containerID="b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b"
Dec 05 17:36:17 crc kubenswrapper[4753]: E1205 17:36:17.921215 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b\": container with ID starting with b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b not found: ID does not exist" containerID="b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.921265 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b"} err="failed to get container status \"b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b\": rpc error: code = NotFound desc = could not find container \"b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b\": container with ID starting with b50ade1ebfeeab991527de477868f1a7248194c309a429efa4f797a4e626cc9b not found: ID does not exist"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.921293 4753 scope.go:117] "RemoveContainer" containerID="ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0"
Dec 05 17:36:17 crc kubenswrapper[4753]: E1205 17:36:17.921592 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0\": container with ID starting with ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0 not found: ID does not exist" containerID="ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.921631 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0"} err="failed to get container status \"ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0\": rpc error: code = NotFound desc = could not find container \"ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0\": container with ID starting with ce5ae343c6b08361e367780c1ec66314abc9da154cfd9b5c697928b61c6018d0 not found: ID does not exist"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.921660 4753 scope.go:117] "RemoveContainer" containerID="09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833"
Dec 05 17:36:17 crc kubenswrapper[4753]: E1205 17:36:17.921881 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833\": container with ID starting with 09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833 not found: ID does not exist" containerID="09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.921908 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833"} err="failed to get container status \"09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833\": rpc error: code = NotFound desc = could not find container \"09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833\": container with ID starting with 09b61f184af82ad4b86676a8b7c64259e3296ec380ac38334ff98b6e387e0833 not found: ID does not exist"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.921925 4753 scope.go:117] "RemoveContainer" containerID="6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713"
Dec 05 17:36:17 crc kubenswrapper[4753]: E1205 17:36:17.922138 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713\": container with ID starting with 6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713 not found: ID does not exist" containerID="6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.922183 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713"} err="failed to get container status \"6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713\": rpc error: code = NotFound desc = could not find container \"6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713\": container with ID starting with 6f87bf3d869c1096d8e38ffab35104168444407c3ef76e70bffb4444008e1713 not found: ID does not exist"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.945249 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-run-httpd\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.945334 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dpf8j\" (UniqueName: \"kubernetes.io/projected/cdd3659b-07fa-436e-93f0-b6b55fcead58-kube-api-access-dpf8j\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.945420 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.945475 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-log-httpd\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.945795 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.945921 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-config-data\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:17 crc kubenswrapper[4753]: I1205 17:36:17.946008 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-scripts\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.048448 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-config-data\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.048524 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-scripts\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.048630 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-run-httpd\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.048658 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dpf8j\" (UniqueName: \"kubernetes.io/projected/cdd3659b-07fa-436e-93f0-b6b55fcead58-kube-api-access-dpf8j\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.048697 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.048727 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-log-httpd\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.048772 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.049648 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-run-httpd\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.049701 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-log-httpd\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.053017 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.054088 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.055223 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-scripts\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.058800 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.058849 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-config-data\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.072737 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dpf8j\" (UniqueName: \"kubernetes.io/projected/cdd3659b-07fa-436e-93f0-b6b55fcead58-kube-api-access-dpf8j\") pod \"ceilometer-0\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.180700 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.733279 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerStarted","Data":"93fa8d6df86e17a8f964479ad62ba06fe38d042eacee5141079308a9c19c5a69"}
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.738334 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.822439 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-h8pkm"]
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.824173 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.834324 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.834495 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.840529 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-h8pkm"]
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.968827 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkk4j\" (UniqueName: \"kubernetes.io/projected/db47010e-762f-430c-b384-215d4ea30192-kube-api-access-kkk4j\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.968919 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.969199 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-config-data\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.969495 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-scripts\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.989748 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.991184 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 17:36:18 crc kubenswrapper[4753]: I1205 17:36:18.995207 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data"
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.002811 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.059192 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.061050 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.065628 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.070975 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkk4j\" (UniqueName: \"kubernetes.io/projected/db47010e-762f-430c-b384-215d4ea30192-kube-api-access-kkk4j\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.071045 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.071097 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-config-data\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.071198 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-scripts\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.092427 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-config-data\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.094826 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm"
for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-scripts\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.122294 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkk4j\" (UniqueName: \"kubernetes.io/projected/db47010e-762f-430c-b384-215d4ea30192-kube-api-access-kkk4j\") pod \"nova-cell0-cell-mapping-h8pkm\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " pod="openstack/nova-cell0-cell-mapping-h8pkm" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.135298 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.161762 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-h8pkm" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.174651 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-config-data\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.174740 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n99n9\" (UniqueName: \"kubernetes.io/projected/ded4654b-7fab-4413-a3e9-f6b746d6c690-kube-api-access-n99n9\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.174785 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d922n\" (UniqueName: \"kubernetes.io/projected/bf5f2bae-4136-4518-9ef0-058628b17355-kube-api-access-d922n\") pod \"nova-scheduler-0\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.174818 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.174853 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded4654b-7fab-4413-a3e9-f6b746d6c690-logs\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.174905 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.174970 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-config-data\") pod \"nova-scheduler-0\" (UID: 
\"bf5f2bae-4136-4518-9ef0-058628b17355\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.177305 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.179372 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.185703 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.281698 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.281972 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-config-data\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.281996 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9fpfw\" (UniqueName: \"kubernetes.io/projected/28c73692-1325-4815-83ed-e0ae3aec6901-kube-api-access-9fpfw\") pod \"nova-cell1-novncproxy-0\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.282042 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n99n9\" (UniqueName: \"kubernetes.io/projected/ded4654b-7fab-4413-a3e9-f6b746d6c690-kube-api-access-n99n9\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.282090 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d922n\" (UniqueName: \"kubernetes.io/projected/bf5f2bae-4136-4518-9ef0-058628b17355-kube-api-access-d922n\") pod \"nova-scheduler-0\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.282108 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.282132 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.282180 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded4654b-7fab-4413-a3e9-f6b746d6c690-logs\") pod \"nova-api-0\" (UID: 
\"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.282229 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.282270 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-config-data\") pod \"nova-scheduler-0\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.283092 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.297022 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded4654b-7fab-4413-a3e9-f6b746d6c690-logs\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.298233 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.304783 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.305186 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-config-data\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.305221 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-config-data\") pod \"nova-scheduler-0\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.316616 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.320057 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.325006 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n99n9\" (UniqueName: \"kubernetes.io/projected/ded4654b-7fab-4413-a3e9-f6b746d6c690-kube-api-access-n99n9\") pod \"nova-api-0\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.325698 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d922n\" (UniqueName: \"kubernetes.io/projected/bf5f2bae-4136-4518-9ef0-058628b17355-kube-api-access-d922n\") pod \"nova-scheduler-0\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.327245 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.340118 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.371022 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.384494 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.384545 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9fpfw\" (UniqueName: \"kubernetes.io/projected/28c73692-1325-4815-83ed-e0ae3aec6901-kube-api-access-9fpfw\") pod \"nova-cell1-novncproxy-0\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.384622 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.395891 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.397418 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-pkjmb"] Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.398343 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.399313 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.408858 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9fpfw\" (UniqueName: \"kubernetes.io/projected/28c73692-1325-4815-83ed-e0ae3aec6901-kube-api-access-9fpfw\") pod \"nova-cell1-novncproxy-0\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.423024 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-pkjmb"] Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.486647 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1df911e0-274f-4ed7-913d-b972d8deb099-logs\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.486689 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.486752 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-config-data\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.486782 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwt7b\" (UniqueName: \"kubernetes.io/projected/1df911e0-274f-4ed7-913d-b972d8deb099-kube-api-access-xwt7b\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.519689 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.588996 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.589062 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-swift-storage-0\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.589113 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.589181 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-config-data\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.589229 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-config\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.589264 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwt7b\" (UniqueName: \"kubernetes.io/projected/1df911e0-274f-4ed7-913d-b972d8deb099-kube-api-access-xwt7b\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.589376 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck67s\" (UniqueName: \"kubernetes.io/projected/ab2065af-a1a7-4876-a066-37f7f01a8435-kube-api-access-ck67s\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.589467 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1df911e0-274f-4ed7-913d-b972d8deb099-logs\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.589492 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 
17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.589533 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-svc\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.590289 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1df911e0-274f-4ed7-913d-b972d8deb099-logs\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.599618 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-config-data\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.611402 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.615652 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwt7b\" (UniqueName: \"kubernetes.io/projected/1df911e0-274f-4ed7-913d-b972d8deb099-kube-api-access-xwt7b\") pod \"nova-metadata-0\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.657218 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.672844 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.692400 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-svc\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.692590 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.692670 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-swift-storage-0\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.692773 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.692851 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-config\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.692982 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck67s\" (UniqueName: \"kubernetes.io/projected/ab2065af-a1a7-4876-a066-37f7f01a8435-kube-api-access-ck67s\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.694091 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-svc\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.694684 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-sb\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.695383 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-swift-storage-0\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc 
kubenswrapper[4753]: I1205 17:36:19.695969 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-nb\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.697016 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-config\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.722781 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck67s\" (UniqueName: \"kubernetes.io/projected/ab2065af-a1a7-4876-a066-37f7f01a8435-kube-api-access-ck67s\") pod \"dnsmasq-dns-7c9cb78d75-pkjmb\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.745253 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1efbaacd-213c-469a-8bee-1515c6eca23f" path="/var/lib/kubelet/pods/1efbaacd-213c-469a-8bee-1515c6eca23f/volumes" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.762296 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerStarted","Data":"cd1a97ef33bbfa59eae87dbb1a186e797c7a2f5c8003981a5a799d327e2dc85a"} Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.793222 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:19 crc kubenswrapper[4753]: I1205 17:36:19.927429 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-h8pkm"] Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.154922 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:36:20 crc kubenswrapper[4753]: W1205 17:36:20.195690 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf5f2bae_4136_4518_9ef0_058628b17355.slice/crio-0a378072587b9be2bd1efa80d382d9d0b2b49c609ce1360ae564ffcdb64f41c1 WatchSource:0}: Error finding container 0a378072587b9be2bd1efa80d382d9d0b2b49c609ce1360ae564ffcdb64f41c1: Status 404 returned error can't find the container with id 0a378072587b9be2bd1efa80d382d9d0b2b49c609ce1360ae564ffcdb64f41c1 Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.359908 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mgmpb"] Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.370342 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.373163 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.373348 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.432237 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.447969 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mgmpb"] Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.542668 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.542811 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-scripts\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.542833 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-config-data\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.542908 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rd72\" (UniqueName: \"kubernetes.io/projected/92da46a9-fbb7-4c2b-a26d-22065b21a23a-kube-api-access-2rd72\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.546920 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.577456 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.644914 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-scripts\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.644963 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-config-data\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 
17:36:20.645044 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rd72\" (UniqueName: \"kubernetes.io/projected/92da46a9-fbb7-4c2b-a26d-22065b21a23a-kube-api-access-2rd72\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.645072 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.658833 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-scripts\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.659003 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.663809 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-config-data\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.683857 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rd72\" (UniqueName: \"kubernetes.io/projected/92da46a9-fbb7-4c2b-a26d-22065b21a23a-kube-api-access-2rd72\") pod \"nova-cell1-conductor-db-sync-mgmpb\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.706681 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.833567 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-h8pkm" event={"ID":"db47010e-762f-430c-b384-215d4ea30192","Type":"ContainerStarted","Data":"fe889ebfa403e966a3c6f1989b49cc16d95b97030b92a4ebac4ae6a38762fa88"} Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.833668 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-h8pkm" event={"ID":"db47010e-762f-430c-b384-215d4ea30192","Type":"ContainerStarted","Data":"27e26885234f21ef643a9cecbfc11fd6c0a0dcc6f9af20cd1f1c7131c9129591"} Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.841038 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bf5f2bae-4136-4518-9ef0-058628b17355","Type":"ContainerStarted","Data":"0a378072587b9be2bd1efa80d382d9d0b2b49c609ce1360ae564ffcdb64f41c1"} Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.844132 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerStarted","Data":"f8d8c6b51e78b11df564815a8800e8f02479847d968808269980faba71113b94"} Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.857568 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-pkjmb"] Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.868435 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded4654b-7fab-4413-a3e9-f6b746d6c690","Type":"ContainerStarted","Data":"c61bb3b1f04d7f77f68f661c99a4927eee17ab1301821d96a82131da1faedc05"} Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.873022 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1df911e0-274f-4ed7-913d-b972d8deb099","Type":"ContainerStarted","Data":"1727bb373196626b5559e7b8accaa5546ec16676c181a9acfba0cfced994db63"} Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.876895 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"28c73692-1325-4815-83ed-e0ae3aec6901","Type":"ContainerStarted","Data":"ced7d7db7ba114f8821e6a4e5a1d17aa84e67c1f83d578309d9ea13425a9d426"} Dec 05 17:36:20 crc kubenswrapper[4753]: I1205 17:36:20.878689 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-h8pkm" podStartSLOduration=2.878670355 podStartE2EDuration="2.878670355s" podCreationTimestamp="2025-12-05 17:36:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:20.868692322 +0000 UTC m=+1919.371799328" watchObservedRunningTime="2025-12-05 17:36:20.878670355 +0000 UTC m=+1919.381777361" Dec 05 17:36:21 crc kubenswrapper[4753]: I1205 17:36:21.469971 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mgmpb"] Dec 05 17:36:21 crc kubenswrapper[4753]: W1205 17:36:21.521778 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92da46a9_fbb7_4c2b_a26d_22065b21a23a.slice/crio-a1b3543ac582de5a55271df429fdc0f08d2f52c8fe669f57da3629a2770516ac WatchSource:0}: Error finding container a1b3543ac582de5a55271df429fdc0f08d2f52c8fe669f57da3629a2770516ac: Status 404 
returned error can't find the container with id a1b3543ac582de5a55271df429fdc0f08d2f52c8fe669f57da3629a2770516ac Dec 05 17:36:21 crc kubenswrapper[4753]: I1205 17:36:21.896589 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mgmpb" event={"ID":"92da46a9-fbb7-4c2b-a26d-22065b21a23a","Type":"ContainerStarted","Data":"60f81511a25640815c4a1caecb42650016dab8ca75a64b978b891dd3577eefd7"} Dec 05 17:36:21 crc kubenswrapper[4753]: I1205 17:36:21.896942 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mgmpb" event={"ID":"92da46a9-fbb7-4c2b-a26d-22065b21a23a","Type":"ContainerStarted","Data":"a1b3543ac582de5a55271df429fdc0f08d2f52c8fe669f57da3629a2770516ac"} Dec 05 17:36:21 crc kubenswrapper[4753]: I1205 17:36:21.900659 4753 generic.go:334] "Generic (PLEG): container finished" podID="ab2065af-a1a7-4876-a066-37f7f01a8435" containerID="33a3651c422c8e2f273387cc663e42074bd20878e632ec75569c59a75349949f" exitCode=0 Dec 05 17:36:21 crc kubenswrapper[4753]: I1205 17:36:21.900700 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" event={"ID":"ab2065af-a1a7-4876-a066-37f7f01a8435","Type":"ContainerDied","Data":"33a3651c422c8e2f273387cc663e42074bd20878e632ec75569c59a75349949f"} Dec 05 17:36:21 crc kubenswrapper[4753]: I1205 17:36:21.900715 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" event={"ID":"ab2065af-a1a7-4876-a066-37f7f01a8435","Type":"ContainerStarted","Data":"9a650a62fe14ffff8159bbae0f92005596b88c16e554d4a9d1b82e92b73b047f"} Dec 05 17:36:21 crc kubenswrapper[4753]: I1205 17:36:21.918039 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerStarted","Data":"66cf734570f26d4816f153692dac7b97d2bb5769dfd7e28d005ee6a7e6c8d96b"} Dec 05 17:36:21 crc kubenswrapper[4753]: I1205 17:36:21.923018 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-mgmpb" podStartSLOduration=1.9230007439999999 podStartE2EDuration="1.923000744s" podCreationTimestamp="2025-12-05 17:36:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:21.91829298 +0000 UTC m=+1920.421399986" watchObservedRunningTime="2025-12-05 17:36:21.923000744 +0000 UTC m=+1920.426107750" Dec 05 17:36:22 crc kubenswrapper[4753]: I1205 17:36:22.944225 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" event={"ID":"ab2065af-a1a7-4876-a066-37f7f01a8435","Type":"ContainerStarted","Data":"86183eb8d8bdb6d521faa4ede93d1ee375c0da43d4b9c858f5b0c60eb01727f4"} Dec 05 17:36:22 crc kubenswrapper[4753]: I1205 17:36:22.946206 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:23 crc kubenswrapper[4753]: I1205 17:36:23.401769 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" podStartSLOduration=4.401744394 podStartE2EDuration="4.401744394s" podCreationTimestamp="2025-12-05 17:36:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:22.970575284 +0000 UTC m=+1921.473682300" watchObservedRunningTime="2025-12-05 17:36:23.401744394 
+0000 UTC m=+1921.904851400" Dec 05 17:36:23 crc kubenswrapper[4753]: I1205 17:36:23.411854 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:23 crc kubenswrapper[4753]: I1205 17:36:23.427383 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:36:25 crc kubenswrapper[4753]: I1205 17:36:25.999277 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bf5f2bae-4136-4518-9ef0-058628b17355","Type":"ContainerStarted","Data":"b41e003847efc0799e05b33a863357e5b07c5a8bfa25eeb18360e3c8a532ee5d"} Dec 05 17:36:26 crc kubenswrapper[4753]: I1205 17:36:26.003696 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded4654b-7fab-4413-a3e9-f6b746d6c690","Type":"ContainerStarted","Data":"2f36662f0424c142c0ba053a8a9f4261cf493984be35ff3f6b9a3900e618e79e"} Dec 05 17:36:26 crc kubenswrapper[4753]: I1205 17:36:26.017850 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"28c73692-1325-4815-83ed-e0ae3aec6901","Type":"ContainerStarted","Data":"7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12"} Dec 05 17:36:26 crc kubenswrapper[4753]: I1205 17:36:26.018110 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="28c73692-1325-4815-83ed-e0ae3aec6901" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12" gracePeriod=30 Dec 05 17:36:26 crc kubenswrapper[4753]: I1205 17:36:26.023033 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.729933264 podStartE2EDuration="8.02301341s" podCreationTimestamp="2025-12-05 17:36:18 +0000 UTC" firstStartedPulling="2025-12-05 17:36:20.200772261 +0000 UTC m=+1918.703879267" lastFinishedPulling="2025-12-05 17:36:25.493852407 +0000 UTC m=+1923.996959413" observedRunningTime="2025-12-05 17:36:26.01773802 +0000 UTC m=+1924.520845026" watchObservedRunningTime="2025-12-05 17:36:26.02301341 +0000 UTC m=+1924.526120416" Dec 05 17:36:26 crc kubenswrapper[4753]: I1205 17:36:26.043383 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerStarted","Data":"368ddfd5ece7e3ca53b5af287ae2501e74ffc1b7945c30fac3f67b1d3437c325"} Dec 05 17:36:26 crc kubenswrapper[4753]: I1205 17:36:26.044408 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:36:26 crc kubenswrapper[4753]: I1205 17:36:26.056720 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.099043362 podStartE2EDuration="7.056695556s" podCreationTimestamp="2025-12-05 17:36:19 +0000 UTC" firstStartedPulling="2025-12-05 17:36:20.5642819 +0000 UTC m=+1919.067388896" lastFinishedPulling="2025-12-05 17:36:25.521934084 +0000 UTC m=+1924.025041090" observedRunningTime="2025-12-05 17:36:26.044619503 +0000 UTC m=+1924.547726509" watchObservedRunningTime="2025-12-05 17:36:26.056695556 +0000 UTC m=+1924.559802562" Dec 05 17:36:26 crc kubenswrapper[4753]: I1205 17:36:26.058813 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"1df911e0-274f-4ed7-913d-b972d8deb099","Type":"ContainerStarted","Data":"754da66f80e8abfd29131c4f6b9277e7f437795ba065e64706bbf9808cd6e548"} Dec 05 17:36:26 crc kubenswrapper[4753]: I1205 17:36:26.077100 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.261123745 podStartE2EDuration="9.077077865s" podCreationTimestamp="2025-12-05 17:36:17 +0000 UTC" firstStartedPulling="2025-12-05 17:36:18.704476912 +0000 UTC m=+1917.207583918" lastFinishedPulling="2025-12-05 17:36:25.520431032 +0000 UTC m=+1924.023538038" observedRunningTime="2025-12-05 17:36:26.06424004 +0000 UTC m=+1924.567347046" watchObservedRunningTime="2025-12-05 17:36:26.077077865 +0000 UTC m=+1924.580184871" Dec 05 17:36:27 crc kubenswrapper[4753]: I1205 17:36:27.080529 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded4654b-7fab-4413-a3e9-f6b746d6c690","Type":"ContainerStarted","Data":"91936b53f6d173ba40cc9d5d11598feb1be04f8379582af2b59ecec4b9e83a04"} Dec 05 17:36:27 crc kubenswrapper[4753]: I1205 17:36:27.087608 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1df911e0-274f-4ed7-913d-b972d8deb099","Type":"ContainerStarted","Data":"2c4337dcb536a1e002d2255fe04957fcb4e198f271acd152c67e0171fae87288"} Dec 05 17:36:27 crc kubenswrapper[4753]: I1205 17:36:27.088068 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1df911e0-274f-4ed7-913d-b972d8deb099" containerName="nova-metadata-metadata" containerID="cri-o://2c4337dcb536a1e002d2255fe04957fcb4e198f271acd152c67e0171fae87288" gracePeriod=30 Dec 05 17:36:27 crc kubenswrapper[4753]: I1205 17:36:27.088100 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="1df911e0-274f-4ed7-913d-b972d8deb099" containerName="nova-metadata-log" containerID="cri-o://754da66f80e8abfd29131c4f6b9277e7f437795ba065e64706bbf9808cd6e548" gracePeriod=30 Dec 05 17:36:27 crc kubenswrapper[4753]: I1205 17:36:27.118796 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.985791738 podStartE2EDuration="9.118776658s" podCreationTimestamp="2025-12-05 17:36:18 +0000 UTC" firstStartedPulling="2025-12-05 17:36:20.374335409 +0000 UTC m=+1918.877442405" lastFinishedPulling="2025-12-05 17:36:25.507320329 +0000 UTC m=+1924.010427325" observedRunningTime="2025-12-05 17:36:27.115611548 +0000 UTC m=+1925.618718574" watchObservedRunningTime="2025-12-05 17:36:27.118776658 +0000 UTC m=+1925.621883664" Dec 05 17:36:27 crc kubenswrapper[4753]: I1205 17:36:27.155856 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.31489279 podStartE2EDuration="8.1558201s" podCreationTimestamp="2025-12-05 17:36:19 +0000 UTC" firstStartedPulling="2025-12-05 17:36:20.55793305 +0000 UTC m=+1919.061040056" lastFinishedPulling="2025-12-05 17:36:25.39886035 +0000 UTC m=+1923.901967366" observedRunningTime="2025-12-05 17:36:27.154869262 +0000 UTC m=+1925.657976268" watchObservedRunningTime="2025-12-05 17:36:27.1558201 +0000 UTC m=+1925.658927106" Dec 05 17:36:27 crc kubenswrapper[4753]: E1205 17:36:27.175606 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1df911e0_274f_4ed7_913d_b972d8deb099.slice/crio-754da66f80e8abfd29131c4f6b9277e7f437795ba065e64706bbf9808cd6e548.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.102109 4753 generic.go:334] "Generic (PLEG): container finished" podID="1df911e0-274f-4ed7-913d-b972d8deb099" containerID="2c4337dcb536a1e002d2255fe04957fcb4e198f271acd152c67e0171fae87288" exitCode=0 Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.102650 4753 generic.go:334] "Generic (PLEG): container finished" podID="1df911e0-274f-4ed7-913d-b972d8deb099" containerID="754da66f80e8abfd29131c4f6b9277e7f437795ba065e64706bbf9808cd6e548" exitCode=143 Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.102176 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1df911e0-274f-4ed7-913d-b972d8deb099","Type":"ContainerDied","Data":"2c4337dcb536a1e002d2255fe04957fcb4e198f271acd152c67e0171fae87288"} Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.102951 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1df911e0-274f-4ed7-913d-b972d8deb099","Type":"ContainerDied","Data":"754da66f80e8abfd29131c4f6b9277e7f437795ba065e64706bbf9808cd6e548"} Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.102984 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"1df911e0-274f-4ed7-913d-b972d8deb099","Type":"ContainerDied","Data":"1727bb373196626b5559e7b8accaa5546ec16676c181a9acfba0cfced994db63"} Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.102997 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1727bb373196626b5559e7b8accaa5546ec16676c181a9acfba0cfced994db63" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.163796 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.342808 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-combined-ca-bundle\") pod \"1df911e0-274f-4ed7-913d-b972d8deb099\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.343013 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwt7b\" (UniqueName: \"kubernetes.io/projected/1df911e0-274f-4ed7-913d-b972d8deb099-kube-api-access-xwt7b\") pod \"1df911e0-274f-4ed7-913d-b972d8deb099\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.343746 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1df911e0-274f-4ed7-913d-b972d8deb099-logs\") pod \"1df911e0-274f-4ed7-913d-b972d8deb099\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.343770 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-config-data\") pod \"1df911e0-274f-4ed7-913d-b972d8deb099\" (UID: \"1df911e0-274f-4ed7-913d-b972d8deb099\") " Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.343987 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1df911e0-274f-4ed7-913d-b972d8deb099-logs" (OuterVolumeSpecName: "logs") pod "1df911e0-274f-4ed7-913d-b972d8deb099" (UID: "1df911e0-274f-4ed7-913d-b972d8deb099"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.344440 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1df911e0-274f-4ed7-913d-b972d8deb099-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.354459 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1df911e0-274f-4ed7-913d-b972d8deb099-kube-api-access-xwt7b" (OuterVolumeSpecName: "kube-api-access-xwt7b") pod "1df911e0-274f-4ed7-913d-b972d8deb099" (UID: "1df911e0-274f-4ed7-913d-b972d8deb099"). InnerVolumeSpecName "kube-api-access-xwt7b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.388004 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1df911e0-274f-4ed7-913d-b972d8deb099" (UID: "1df911e0-274f-4ed7-913d-b972d8deb099"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.401778 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-config-data" (OuterVolumeSpecName: "config-data") pod "1df911e0-274f-4ed7-913d-b972d8deb099" (UID: "1df911e0-274f-4ed7-913d-b972d8deb099"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.446323 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.446363 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwt7b\" (UniqueName: \"kubernetes.io/projected/1df911e0-274f-4ed7-913d-b972d8deb099-kube-api-access-xwt7b\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:28 crc kubenswrapper[4753]: I1205 17:36:28.446379 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1df911e0-274f-4ed7-913d-b972d8deb099-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.119450 4753 generic.go:334] "Generic (PLEG): container finished" podID="db47010e-762f-430c-b384-215d4ea30192" containerID="fe889ebfa403e966a3c6f1989b49cc16d95b97030b92a4ebac4ae6a38762fa88" exitCode=0 Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.120231 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.127601 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-h8pkm" event={"ID":"db47010e-762f-430c-b384-215d4ea30192","Type":"ContainerDied","Data":"fe889ebfa403e966a3c6f1989b49cc16d95b97030b92a4ebac4ae6a38762fa88"} Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.194850 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.208233 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.220875 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:29 crc kubenswrapper[4753]: E1205 17:36:29.221503 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1df911e0-274f-4ed7-913d-b972d8deb099" containerName="nova-metadata-log" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.221527 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="1df911e0-274f-4ed7-913d-b972d8deb099" containerName="nova-metadata-log" Dec 05 17:36:29 crc kubenswrapper[4753]: E1205 17:36:29.221549 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1df911e0-274f-4ed7-913d-b972d8deb099" containerName="nova-metadata-metadata" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.221562 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="1df911e0-274f-4ed7-913d-b972d8deb099" containerName="nova-metadata-metadata" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.221833 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="1df911e0-274f-4ed7-913d-b972d8deb099" containerName="nova-metadata-metadata" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.221859 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="1df911e0-274f-4ed7-913d-b972d8deb099" containerName="nova-metadata-log" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.223467 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.225238 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.228092 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.232369 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.366929 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmsvn\" (UniqueName: \"kubernetes.io/projected/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-kube-api-access-qmsvn\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.366998 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.367468 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-config-data\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.367517 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-logs\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.367549 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.371847 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.371881 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.401722 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.469392 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmsvn\" (UniqueName: \"kubernetes.io/projected/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-kube-api-access-qmsvn\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.469463 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.469565 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-logs\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.469582 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.469597 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-config-data\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.470599 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-logs\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.474398 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.474643 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-config-data\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.476092 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.490063 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmsvn\" (UniqueName: \"kubernetes.io/projected/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-kube-api-access-qmsvn\") pod \"nova-metadata-0\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") " pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.521736 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.522109 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.557799 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.658871 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.745708 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1df911e0-274f-4ed7-913d-b972d8deb099" path="/var/lib/kubelet/pods/1df911e0-274f-4ed7-913d-b972d8deb099/volumes" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.796325 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.863010 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-4k4sh"] Dec 05 17:36:29 crc kubenswrapper[4753]: I1205 17:36:29.863291 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" podUID="1a611a40-5e56-413b-8f3f-244522e530e7" containerName="dnsmasq-dns" containerID="cri-o://cb46533efe28465f7340e3af85f8cdbc3da3c4e966742908a6f98e20a671ba9d" gracePeriod=10 Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.090855 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:30 crc kubenswrapper[4753]: W1205 17:36:30.096889 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb5d1526e_6ea4_41d5_ac9c_3d3a87af8f17.slice/crio-49dc8dded99f7fcadfc74b0338d5ee401e56ca157f5444670984de937b6e0bf7 WatchSource:0}: Error finding container 49dc8dded99f7fcadfc74b0338d5ee401e56ca157f5444670984de937b6e0bf7: Status 404 returned error can't find the container with id 49dc8dded99f7fcadfc74b0338d5ee401e56ca157f5444670984de937b6e0bf7 Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.151295 4753 generic.go:334] "Generic (PLEG): container finished" podID="1a611a40-5e56-413b-8f3f-244522e530e7" containerID="cb46533efe28465f7340e3af85f8cdbc3da3c4e966742908a6f98e20a671ba9d" exitCode=0 Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.151709 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" event={"ID":"1a611a40-5e56-413b-8f3f-244522e530e7","Type":"ContainerDied","Data":"cb46533efe28465f7340e3af85f8cdbc3da3c4e966742908a6f98e20a671ba9d"} Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.156218 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17","Type":"ContainerStarted","Data":"49dc8dded99f7fcadfc74b0338d5ee401e56ca157f5444670984de937b6e0bf7"} Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.199741 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.609366 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.213:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.610042 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-api" probeResult="failure" output="Get 
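The two "Probe failed" entries above are HTTP startup probes whose client-side timeout fired before nova-api answered: "context deadline exceeded (Client.Timeout exceeded while awaiting headers)" is the error net/http returns when the response headers do not arrive within the client's Timeout. A minimal sketch that reproduces that failure mode (the URL is copied from the log; the 1-second timeout is illustrative, not the pod's configured probe parameter):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce issues a single HTTP GET the way a startup probe does: any
// non-nil error or a >=400 status counts as a probe failure.
func probeOnce(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		// On a slow endpoint this reads exactly like the log:
		// Get "...": context deadline exceeded (Client.Timeout exceeded while awaiting headers)
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Errorf("probe failed: status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	if err := probeOnce("http://10.217.0.213:8774/", time.Second); err != nil {
		fmt.Println("Probe failed:", err)
	}
}

Startup probe failures are tolerated up to the configured failure threshold, which is why the same pod flips to probe="startup" status="started" a few entries later once the endpoint begins answering.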
\"http://10.217.0.213:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.637096 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.799785 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-svc\") pod \"1a611a40-5e56-413b-8f3f-244522e530e7\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.799909 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xts4k\" (UniqueName: \"kubernetes.io/projected/1a611a40-5e56-413b-8f3f-244522e530e7-kube-api-access-xts4k\") pod \"1a611a40-5e56-413b-8f3f-244522e530e7\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.799954 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-swift-storage-0\") pod \"1a611a40-5e56-413b-8f3f-244522e530e7\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.800001 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-nb\") pod \"1a611a40-5e56-413b-8f3f-244522e530e7\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.800024 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-config\") pod \"1a611a40-5e56-413b-8f3f-244522e530e7\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.800364 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-sb\") pod \"1a611a40-5e56-413b-8f3f-244522e530e7\" (UID: \"1a611a40-5e56-413b-8f3f-244522e530e7\") " Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.829321 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a611a40-5e56-413b-8f3f-244522e530e7-kube-api-access-xts4k" (OuterVolumeSpecName: "kube-api-access-xts4k") pod "1a611a40-5e56-413b-8f3f-244522e530e7" (UID: "1a611a40-5e56-413b-8f3f-244522e530e7"). InnerVolumeSpecName "kube-api-access-xts4k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.902688 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xts4k\" (UniqueName: \"kubernetes.io/projected/1a611a40-5e56-413b-8f3f-244522e530e7-kube-api-access-xts4k\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.932470 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a611a40-5e56-413b-8f3f-244522e530e7" (UID: "1a611a40-5e56-413b-8f3f-244522e530e7"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.944596 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1a611a40-5e56-413b-8f3f-244522e530e7" (UID: "1a611a40-5e56-413b-8f3f-244522e530e7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.951432 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-config" (OuterVolumeSpecName: "config") pod "1a611a40-5e56-413b-8f3f-244522e530e7" (UID: "1a611a40-5e56-413b-8f3f-244522e530e7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.953270 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1a611a40-5e56-413b-8f3f-244522e530e7" (UID: "1a611a40-5e56-413b-8f3f-244522e530e7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:36:30 crc kubenswrapper[4753]: I1205 17:36:30.957658 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1a611a40-5e56-413b-8f3f-244522e530e7" (UID: "1a611a40-5e56-413b-8f3f-244522e530e7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.004571 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.004823 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.004896 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.004961 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.005020 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a611a40-5e56-413b-8f3f-244522e530e7-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.048735 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-h8pkm" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.176801 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" event={"ID":"1a611a40-5e56-413b-8f3f-244522e530e7","Type":"ContainerDied","Data":"221da1e6d94006b11246110a7ab8c353651a553c7a5407adfe086426b367f37b"} Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.176868 4753 scope.go:117] "RemoveContainer" containerID="cb46533efe28465f7340e3af85f8cdbc3da3c4e966742908a6f98e20a671ba9d" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.177050 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86d9875b97-4k4sh" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.191276 4753 generic.go:334] "Generic (PLEG): container finished" podID="92da46a9-fbb7-4c2b-a26d-22065b21a23a" containerID="60f81511a25640815c4a1caecb42650016dab8ca75a64b978b891dd3577eefd7" exitCode=0 Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.191325 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mgmpb" event={"ID":"92da46a9-fbb7-4c2b-a26d-22065b21a23a","Type":"ContainerDied","Data":"60f81511a25640815c4a1caecb42650016dab8ca75a64b978b891dd3577eefd7"} Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.193586 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17","Type":"ContainerStarted","Data":"5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386"} Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.193667 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17","Type":"ContainerStarted","Data":"5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332"} Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.195225 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-h8pkm" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.195831 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-h8pkm" event={"ID":"db47010e-762f-430c-b384-215d4ea30192","Type":"ContainerDied","Data":"27e26885234f21ef643a9cecbfc11fd6c0a0dcc6f9af20cd1f1c7131c9129591"} Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.195868 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="27e26885234f21ef643a9cecbfc11fd6c0a0dcc6f9af20cd1f1c7131c9129591" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.210798 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkk4j\" (UniqueName: \"kubernetes.io/projected/db47010e-762f-430c-b384-215d4ea30192-kube-api-access-kkk4j\") pod \"db47010e-762f-430c-b384-215d4ea30192\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.210976 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-combined-ca-bundle\") pod \"db47010e-762f-430c-b384-215d4ea30192\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.211036 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-scripts\") pod \"db47010e-762f-430c-b384-215d4ea30192\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.211123 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-config-data\") pod \"db47010e-762f-430c-b384-215d4ea30192\" (UID: \"db47010e-762f-430c-b384-215d4ea30192\") " Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.223383 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db47010e-762f-430c-b384-215d4ea30192-kube-api-access-kkk4j" (OuterVolumeSpecName: "kube-api-access-kkk4j") pod "db47010e-762f-430c-b384-215d4ea30192" (UID: "db47010e-762f-430c-b384-215d4ea30192"). InnerVolumeSpecName "kube-api-access-kkk4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.227690 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-scripts" (OuterVolumeSpecName: "scripts") pod "db47010e-762f-430c-b384-215d4ea30192" (UID: "db47010e-762f-430c-b384-215d4ea30192"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.229338 4753 scope.go:117] "RemoveContainer" containerID="b6fac4a38433e3c9efd7d4017bb76ea9b494e0c56f8f21b5dee3bebb0fcbc5df" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.257282 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-config-data" (OuterVolumeSpecName: "config-data") pod "db47010e-762f-430c-b384-215d4ea30192" (UID: "db47010e-762f-430c-b384-215d4ea30192"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.266049 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db47010e-762f-430c-b384-215d4ea30192" (UID: "db47010e-762f-430c-b384-215d4ea30192"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.270386 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.270368337 podStartE2EDuration="2.270368337s" podCreationTimestamp="2025-12-05 17:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:31.236961709 +0000 UTC m=+1929.740068745" watchObservedRunningTime="2025-12-05 17:36:31.270368337 +0000 UTC m=+1929.773475343" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.282216 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-4k4sh"] Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.291307 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86d9875b97-4k4sh"] Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.314108 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkk4j\" (UniqueName: \"kubernetes.io/projected/db47010e-762f-430c-b384-215d4ea30192-kube-api-access-kkk4j\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.314164 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.314176 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.314184 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db47010e-762f-430c-b384-215d4ea30192-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.364593 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.374244 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.374497 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-log" containerID="cri-o://2f36662f0424c142c0ba053a8a9f4261cf493984be35ff3f6b9a3900e618e79e" gracePeriod=30 Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.374585 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-api" containerID="cri-o://91936b53f6d173ba40cc9d5d11598feb1be04f8379582af2b59ecec4b9e83a04" gracePeriod=30 Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.452489 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-metadata-0"] Dec 05 17:36:31 crc kubenswrapper[4753]: I1205 17:36:31.735361 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a611a40-5e56-413b-8f3f-244522e530e7" path="/var/lib/kubelet/pods/1a611a40-5e56-413b-8f3f-244522e530e7/volumes" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.207793 4753 generic.go:334] "Generic (PLEG): container finished" podID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerID="2f36662f0424c142c0ba053a8a9f4261cf493984be35ff3f6b9a3900e618e79e" exitCode=143 Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.207870 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded4654b-7fab-4413-a3e9-f6b746d6c690","Type":"ContainerDied","Data":"2f36662f0424c142c0ba053a8a9f4261cf493984be35ff3f6b9a3900e618e79e"} Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.209537 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="bf5f2bae-4136-4518-9ef0-058628b17355" containerName="nova-scheduler-scheduler" containerID="cri-o://b41e003847efc0799e05b33a863357e5b07c5a8bfa25eeb18360e3c8a532ee5d" gracePeriod=30 Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.723849 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:36:32 crc kubenswrapper[4753]: E1205 17:36:32.724352 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.762834 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.852057 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rd72\" (UniqueName: \"kubernetes.io/projected/92da46a9-fbb7-4c2b-a26d-22065b21a23a-kube-api-access-2rd72\") pod \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.852245 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-scripts\") pod \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.852345 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-config-data\") pod \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.852432 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-combined-ca-bundle\") pod \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\" (UID: \"92da46a9-fbb7-4c2b-a26d-22065b21a23a\") " Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.864981 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92da46a9-fbb7-4c2b-a26d-22065b21a23a-kube-api-access-2rd72" (OuterVolumeSpecName: "kube-api-access-2rd72") pod "92da46a9-fbb7-4c2b-a26d-22065b21a23a" (UID: "92da46a9-fbb7-4c2b-a26d-22065b21a23a"). InnerVolumeSpecName "kube-api-access-2rd72". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.870728 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-scripts" (OuterVolumeSpecName: "scripts") pod "92da46a9-fbb7-4c2b-a26d-22065b21a23a" (UID: "92da46a9-fbb7-4c2b-a26d-22065b21a23a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.887224 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92da46a9-fbb7-4c2b-a26d-22065b21a23a" (UID: "92da46a9-fbb7-4c2b-a26d-22065b21a23a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.889812 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-config-data" (OuterVolumeSpecName: "config-data") pod "92da46a9-fbb7-4c2b-a26d-22065b21a23a" (UID: "92da46a9-fbb7-4c2b-a26d-22065b21a23a"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.954922 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.954969 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.954984 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92da46a9-fbb7-4c2b-a26d-22065b21a23a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:32 crc kubenswrapper[4753]: I1205 17:36:32.954998 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rd72\" (UniqueName: \"kubernetes.io/projected/92da46a9-fbb7-4c2b-a26d-22065b21a23a-kube-api-access-2rd72\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.222372 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-mgmpb" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.222370 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-mgmpb" event={"ID":"92da46a9-fbb7-4c2b-a26d-22065b21a23a","Type":"ContainerDied","Data":"a1b3543ac582de5a55271df429fdc0f08d2f52c8fe669f57da3629a2770516ac"} Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.222435 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1b3543ac582de5a55271df429fdc0f08d2f52c8fe669f57da3629a2770516ac" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.222551 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerName="nova-metadata-log" containerID="cri-o://5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332" gracePeriod=30 Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.222692 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerName="nova-metadata-metadata" containerID="cri-o://5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386" gracePeriod=30 Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.304964 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 17:36:33 crc kubenswrapper[4753]: E1205 17:36:33.305434 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92da46a9-fbb7-4c2b-a26d-22065b21a23a" containerName="nova-cell1-conductor-db-sync" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.305451 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="92da46a9-fbb7-4c2b-a26d-22065b21a23a" containerName="nova-cell1-conductor-db-sync" Dec 05 17:36:33 crc kubenswrapper[4753]: E1205 17:36:33.305484 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db47010e-762f-430c-b384-215d4ea30192" containerName="nova-manage" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.305490 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="db47010e-762f-430c-b384-215d4ea30192" containerName="nova-manage" Dec 05 17:36:33 crc 
kubenswrapper[4753]: E1205 17:36:33.305509 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a611a40-5e56-413b-8f3f-244522e530e7" containerName="init" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.305515 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a611a40-5e56-413b-8f3f-244522e530e7" containerName="init" Dec 05 17:36:33 crc kubenswrapper[4753]: E1205 17:36:33.305525 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a611a40-5e56-413b-8f3f-244522e530e7" containerName="dnsmasq-dns" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.305530 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a611a40-5e56-413b-8f3f-244522e530e7" containerName="dnsmasq-dns" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.305718 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a611a40-5e56-413b-8f3f-244522e530e7" containerName="dnsmasq-dns" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.305729 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="db47010e-762f-430c-b384-215d4ea30192" containerName="nova-manage" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.305749 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="92da46a9-fbb7-4c2b-a26d-22065b21a23a" containerName="nova-cell1-conductor-db-sync" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.306511 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.308753 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.322673 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.467454 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9skg\" (UniqueName: \"kubernetes.io/projected/696fed1a-38d3-459f-b08b-128b8d41d472-kube-api-access-x9skg\") pod \"nova-cell1-conductor-0\" (UID: \"696fed1a-38d3-459f-b08b-128b8d41d472\") " pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.468308 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/696fed1a-38d3-459f-b08b-128b8d41d472-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"696fed1a-38d3-459f-b08b-128b8d41d472\") " pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.468415 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/696fed1a-38d3-459f-b08b-128b8d41d472-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"696fed1a-38d3-459f-b08b-128b8d41d472\") " pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.571463 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/696fed1a-38d3-459f-b08b-128b8d41d472-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"696fed1a-38d3-459f-b08b-128b8d41d472\") " pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.571569 4753 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/696fed1a-38d3-459f-b08b-128b8d41d472-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"696fed1a-38d3-459f-b08b-128b8d41d472\") " pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.571807 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9skg\" (UniqueName: \"kubernetes.io/projected/696fed1a-38d3-459f-b08b-128b8d41d472-kube-api-access-x9skg\") pod \"nova-cell1-conductor-0\" (UID: \"696fed1a-38d3-459f-b08b-128b8d41d472\") " pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.584646 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/696fed1a-38d3-459f-b08b-128b8d41d472-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"696fed1a-38d3-459f-b08b-128b8d41d472\") " pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.585702 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/696fed1a-38d3-459f-b08b-128b8d41d472-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"696fed1a-38d3-459f-b08b-128b8d41d472\") " pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.594388 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9skg\" (UniqueName: \"kubernetes.io/projected/696fed1a-38d3-459f-b08b-128b8d41d472-kube-api-access-x9skg\") pod \"nova-cell1-conductor-0\" (UID: \"696fed1a-38d3-459f-b08b-128b8d41d472\") " pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.622686 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:33 crc kubenswrapper[4753]: I1205 17:36:33.939007 4753 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.102423 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-combined-ca-bundle\") pod \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") "
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.102468 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-logs\") pod \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") "
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.102530 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-config-data\") pod \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") "
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.102626 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-nova-metadata-tls-certs\") pod \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") "
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.102722 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmsvn\" (UniqueName: \"kubernetes.io/projected/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-kube-api-access-qmsvn\") pod \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\" (UID: \"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17\") "
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.104342 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-logs" (OuterVolumeSpecName: "logs") pod "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" (UID: "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.109403 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-kube-api-access-qmsvn" (OuterVolumeSpecName: "kube-api-access-qmsvn") pod "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" (UID: "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17"). InnerVolumeSpecName "kube-api-access-qmsvn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.144464 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" (UID: "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.153296 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-config-data" (OuterVolumeSpecName: "config-data") pod "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" (UID: "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.169857 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" (UID: "b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.214521 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-logs\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.214571 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.214587 4753 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.214603 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmsvn\" (UniqueName: \"kubernetes.io/projected/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-kube-api-access-qmsvn\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.214616 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.217219 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.232440 4753 generic.go:334] "Generic (PLEG): container finished" podID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerID="5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386" exitCode=0
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.234964 4753 generic.go:334] "Generic (PLEG): container finished" podID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerID="5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332" exitCode=143
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.232572 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.232507 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17","Type":"ContainerDied","Data":"5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386"}
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.236593 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17","Type":"ContainerDied","Data":"5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332"}
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.236616 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17","Type":"ContainerDied","Data":"49dc8dded99f7fcadfc74b0338d5ee401e56ca157f5444670984de937b6e0bf7"}
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.236679 4753 scope.go:117] "RemoveContainer" containerID="5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.242698 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"696fed1a-38d3-459f-b08b-128b8d41d472","Type":"ContainerStarted","Data":"5e231cb679c7dfb5135df266ca12907184f7fd8b244c561b8e7ada9282bd57f1"}
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.244176 4753 generic.go:334] "Generic (PLEG): container finished" podID="bf5f2bae-4136-4518-9ef0-058628b17355" containerID="b41e003847efc0799e05b33a863357e5b07c5a8bfa25eeb18360e3c8a532ee5d" exitCode=0
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.244211 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bf5f2bae-4136-4518-9ef0-058628b17355","Type":"ContainerDied","Data":"b41e003847efc0799e05b33a863357e5b07c5a8bfa25eeb18360e3c8a532ee5d"}
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.292893 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.303628 4753 scope.go:117] "RemoveContainer" containerID="5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.311297 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.335822 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.337878 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:36:34 crc kubenswrapper[4753]: E1205 17:36:34.338403 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerName="nova-metadata-metadata"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.338432 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerName="nova-metadata-metadata"
Dec 05 17:36:34 crc kubenswrapper[4753]: E1205 17:36:34.338458 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerName="nova-metadata-log"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.338467 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerName="nova-metadata-log"
Dec 05 17:36:34 crc kubenswrapper[4753]: E1205 17:36:34.338497 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf5f2bae-4136-4518-9ef0-058628b17355" containerName="nova-scheduler-scheduler"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.338508 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf5f2bae-4136-4518-9ef0-058628b17355" containerName="nova-scheduler-scheduler"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.338772 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerName="nova-metadata-metadata"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.338797 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" containerName="nova-metadata-log"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.338827 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf5f2bae-4136-4518-9ef0-058628b17355" containerName="nova-scheduler-scheduler"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.340349 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.343633 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.343640 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.350367 4753 scope.go:117] "RemoveContainer" containerID="5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386"
Dec 05 17:36:34 crc kubenswrapper[4753]: E1205 17:36:34.353410 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386\": container with ID starting with 5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386 not found: ID does not exist" containerID="5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.353470 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386"} err="failed to get container status \"5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386\": rpc error: code = NotFound desc = could not find container \"5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386\": container with ID starting with 5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386 not found: ID does not exist"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.353504 4753 scope.go:117] "RemoveContainer" containerID="5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332"
Dec 05 17:36:34 crc kubenswrapper[4753]: E1205 17:36:34.353913 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332\": container with ID starting with 5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332 not found: ID does not exist" containerID="5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.353953 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332"} err="failed to get container status \"5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332\": rpc error: code = NotFound desc = could not find container \"5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332\": container with ID starting with 5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332 not found: ID does not exist"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.353977 4753 scope.go:117] "RemoveContainer" containerID="5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.354283 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386"} err="failed to get container status \"5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386\": rpc error: code = NotFound desc = could not find container \"5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386\": container with ID starting with 5bde1c87e473409e6ddc7eadcfd5f50b830b601a8828ff8a25ab7afe3e262386 not found: ID does not exist"
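The "ContainerStatus from runtime service failed ... code = NotFound" errors above are benign: the containers were already removed, so the retried deletion finds nothing. A sketch of the idempotent-delete pattern that makes such retries safe, where runtimeRemove is a hypothetical stand-in for the CRI RemoveContainer call (gRPC status codes are what the "rpc error: code = NotFound" strings come from):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats NotFound from the runtime as success: whether the
// delete just happened or happened earlier, the goal state is reached.
func removeContainer(id string, runtimeRemove func(string) error) error {
	if err := runtimeRemove(id); err != nil && status.Code(err) != codes.NotFound {
		return fmt.Errorf("failed to remove container %s: %w", id, err)
	}
	return nil
}

func main() {
	alreadyGone := func(id string) error {
		return status.Error(codes.NotFound, "could not find container \""+id+"\": ID does not exist")
	}
	fmt.Println(removeContainer("5bde1c87e473", alreadyGone)) // <nil>
}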
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.354305 4753 scope.go:117] "RemoveContainer" containerID="5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.354445 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.354501 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332"} err="failed to get container status \"5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332\": rpc error: code = NotFound desc = could not find container \"5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332\": container with ID starting with 5161150f0b5aed869a50fc3fbdd8c7218190b1909083c09b1570fd85d739d332 not found: ID does not exist"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.417676 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-combined-ca-bundle\") pod \"bf5f2bae-4136-4518-9ef0-058628b17355\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") "
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.417755 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d922n\" (UniqueName: \"kubernetes.io/projected/bf5f2bae-4136-4518-9ef0-058628b17355-kube-api-access-d922n\") pod \"bf5f2bae-4136-4518-9ef0-058628b17355\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") "
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.417921 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-config-data\") pod \"bf5f2bae-4136-4518-9ef0-058628b17355\" (UID: \"bf5f2bae-4136-4518-9ef0-058628b17355\") "
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.418249 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-config-data\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.418284 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867981ea-803a-423e-ad91-115d899c9c6a-logs\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.418328 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.418425 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.418532 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4mmzb\" (UniqueName: \"kubernetes.io/projected/867981ea-803a-423e-ad91-115d899c9c6a-kube-api-access-4mmzb\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.454189 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf5f2bae-4136-4518-9ef0-058628b17355-kube-api-access-d922n" (OuterVolumeSpecName: "kube-api-access-d922n") pod "bf5f2bae-4136-4518-9ef0-058628b17355" (UID: "bf5f2bae-4136-4518-9ef0-058628b17355"). InnerVolumeSpecName "kube-api-access-d922n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.493995 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bf5f2bae-4136-4518-9ef0-058628b17355" (UID: "bf5f2bae-4136-4518-9ef0-058628b17355"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.497171 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-config-data" (OuterVolumeSpecName: "config-data") pod "bf5f2bae-4136-4518-9ef0-058628b17355" (UID: "bf5f2bae-4136-4518-9ef0-058628b17355"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.519985 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4mmzb\" (UniqueName: \"kubernetes.io/projected/867981ea-803a-423e-ad91-115d899c9c6a-kube-api-access-4mmzb\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.520108 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-config-data\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.520142 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867981ea-803a-423e-ad91-115d899c9c6a-logs\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.520200 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.520242 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.520295 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.520306 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d922n\" (UniqueName: \"kubernetes.io/projected/bf5f2bae-4136-4518-9ef0-058628b17355-kube-api-access-d922n\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.520318 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bf5f2bae-4136-4518-9ef0-058628b17355-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.520716 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867981ea-803a-423e-ad91-115d899c9c6a-logs\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.524497 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.524567 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.526865 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-config-data\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.538627 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4mmzb\" (UniqueName: \"kubernetes.io/projected/867981ea-803a-423e-ad91-115d899c9c6a-kube-api-access-4mmzb\") pod \"nova-metadata-0\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " pod="openstack/nova-metadata-0"
Dec 05 17:36:34 crc kubenswrapper[4753]: I1205 17:36:34.668636 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:36:35 crc kubenswrapper[4753]: W1205 17:36:35.181920 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod867981ea_803a_423e_ad91_115d899c9c6a.slice/crio-0afa11853eafef3f62bdd22bbab36476592fe409e7065a9401c396f1f8cbc6da WatchSource:0}: Error finding container 0afa11853eafef3f62bdd22bbab36476592fe409e7065a9401c396f1f8cbc6da: Status 404 returned error can't find the container with id 0afa11853eafef3f62bdd22bbab36476592fe409e7065a9401c396f1f8cbc6da Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.181986 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.255352 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"696fed1a-38d3-459f-b08b-128b8d41d472","Type":"ContainerStarted","Data":"4f68b2df9a458d6d72d6fb0b119238465e1ed639f1094d8066b99c5e2a3c58ad"} Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.255473 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.267450 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"bf5f2bae-4136-4518-9ef0-058628b17355","Type":"ContainerDied","Data":"0a378072587b9be2bd1efa80d382d9d0b2b49c609ce1360ae564ffcdb64f41c1"} Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.267495 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.267535 4753 scope.go:117] "RemoveContainer" containerID="b41e003847efc0799e05b33a863357e5b07c5a8bfa25eeb18360e3c8a532ee5d" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.270396 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"867981ea-803a-423e-ad91-115d899c9c6a","Type":"ContainerStarted","Data":"0afa11853eafef3f62bdd22bbab36476592fe409e7065a9401c396f1f8cbc6da"} Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.280024 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.280005098 podStartE2EDuration="2.280005098s" podCreationTimestamp="2025-12-05 17:36:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:35.274948554 +0000 UTC m=+1933.778055730" watchObservedRunningTime="2025-12-05 17:36:35.280005098 +0000 UTC m=+1933.783112124" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.328077 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.346713 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.368893 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.370401 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.375176 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.378847 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.441757 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdn44\" (UniqueName: \"kubernetes.io/projected/4af889e3-58fc-4d21-8c35-28e6166eba2e-kube-api-access-zdn44\") pod \"nova-scheduler-0\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.441840 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.441864 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-config-data\") pod \"nova-scheduler-0\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.543729 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdn44\" (UniqueName: \"kubernetes.io/projected/4af889e3-58fc-4d21-8c35-28e6166eba2e-kube-api-access-zdn44\") pod \"nova-scheduler-0\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.543816 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.543842 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-config-data\") pod \"nova-scheduler-0\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.547873 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-config-data\") pod \"nova-scheduler-0\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.551341 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.564967 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdn44\" (UniqueName: 
\"kubernetes.io/projected/4af889e3-58fc-4d21-8c35-28e6166eba2e-kube-api-access-zdn44\") pod \"nova-scheduler-0\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.695173 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.740770 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17" path="/var/lib/kubelet/pods/b5d1526e-6ea4-41d5-ac9c-3d3a87af8f17/volumes" Dec 05 17:36:35 crc kubenswrapper[4753]: I1205 17:36:35.741493 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf5f2bae-4136-4518-9ef0-058628b17355" path="/var/lib/kubelet/pods/bf5f2bae-4136-4518-9ef0-058628b17355/volumes" Dec 05 17:36:36 crc kubenswrapper[4753]: W1205 17:36:36.247686 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4af889e3_58fc_4d21_8c35_28e6166eba2e.slice/crio-36cc96fb2b5a6e429e82cf745153d04d0ece1123165f89c4cc3d0ca25bb5e13c WatchSource:0}: Error finding container 36cc96fb2b5a6e429e82cf745153d04d0ece1123165f89c4cc3d0ca25bb5e13c: Status 404 returned error can't find the container with id 36cc96fb2b5a6e429e82cf745153d04d0ece1123165f89c4cc3d0ca25bb5e13c Dec 05 17:36:36 crc kubenswrapper[4753]: I1205 17:36:36.250903 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:36:36 crc kubenswrapper[4753]: I1205 17:36:36.315838 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"867981ea-803a-423e-ad91-115d899c9c6a","Type":"ContainerStarted","Data":"d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6"} Dec 05 17:36:36 crc kubenswrapper[4753]: I1205 17:36:36.315897 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"867981ea-803a-423e-ad91-115d899c9c6a","Type":"ContainerStarted","Data":"12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b"} Dec 05 17:36:36 crc kubenswrapper[4753]: I1205 17:36:36.320560 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4af889e3-58fc-4d21-8c35-28e6166eba2e","Type":"ContainerStarted","Data":"36cc96fb2b5a6e429e82cf745153d04d0ece1123165f89c4cc3d0ca25bb5e13c"} Dec 05 17:36:36 crc kubenswrapper[4753]: I1205 17:36:36.339961 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.339941619 podStartE2EDuration="2.339941619s" podCreationTimestamp="2025-12-05 17:36:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:36.337777977 +0000 UTC m=+1934.840885013" watchObservedRunningTime="2025-12-05 17:36:36.339941619 +0000 UTC m=+1934.843048635" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.332957 4753 generic.go:334] "Generic (PLEG): container finished" podID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerID="91936b53f6d173ba40cc9d5d11598feb1be04f8379582af2b59ecec4b9e83a04" exitCode=0 Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.333020 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"ded4654b-7fab-4413-a3e9-f6b746d6c690","Type":"ContainerDied","Data":"91936b53f6d173ba40cc9d5d11598feb1be04f8379582af2b59ecec4b9e83a04"} Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.334863 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4af889e3-58fc-4d21-8c35-28e6166eba2e","Type":"ContainerStarted","Data":"1d44cbc3c67c8a7544913e19dbca97fea44114f6c0933be8d9502d19824f99c3"} Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.369466 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.369444967 podStartE2EDuration="2.369444967s" podCreationTimestamp="2025-12-05 17:36:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:37.355258754 +0000 UTC m=+1935.858365760" watchObservedRunningTime="2025-12-05 17:36:37.369444967 +0000 UTC m=+1935.872551993" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.435686 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.616573 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n99n9\" (UniqueName: \"kubernetes.io/projected/ded4654b-7fab-4413-a3e9-f6b746d6c690-kube-api-access-n99n9\") pod \"ded4654b-7fab-4413-a3e9-f6b746d6c690\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.616900 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded4654b-7fab-4413-a3e9-f6b746d6c690-logs\") pod \"ded4654b-7fab-4413-a3e9-f6b746d6c690\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.616953 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-combined-ca-bundle\") pod \"ded4654b-7fab-4413-a3e9-f6b746d6c690\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.617089 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-config-data\") pod \"ded4654b-7fab-4413-a3e9-f6b746d6c690\" (UID: \"ded4654b-7fab-4413-a3e9-f6b746d6c690\") " Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.617484 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ded4654b-7fab-4413-a3e9-f6b746d6c690-logs" (OuterVolumeSpecName: "logs") pod "ded4654b-7fab-4413-a3e9-f6b746d6c690" (UID: "ded4654b-7fab-4413-a3e9-f6b746d6c690"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.618381 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ded4654b-7fab-4413-a3e9-f6b746d6c690-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.627128 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ded4654b-7fab-4413-a3e9-f6b746d6c690-kube-api-access-n99n9" (OuterVolumeSpecName: "kube-api-access-n99n9") pod "ded4654b-7fab-4413-a3e9-f6b746d6c690" (UID: "ded4654b-7fab-4413-a3e9-f6b746d6c690"). InnerVolumeSpecName "kube-api-access-n99n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.650718 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-config-data" (OuterVolumeSpecName: "config-data") pod "ded4654b-7fab-4413-a3e9-f6b746d6c690" (UID: "ded4654b-7fab-4413-a3e9-f6b746d6c690"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.668505 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ded4654b-7fab-4413-a3e9-f6b746d6c690" (UID: "ded4654b-7fab-4413-a3e9-f6b746d6c690"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.720541 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.720583 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ded4654b-7fab-4413-a3e9-f6b746d6c690-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:37 crc kubenswrapper[4753]: I1205 17:36:37.720592 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n99n9\" (UniqueName: \"kubernetes.io/projected/ded4654b-7fab-4413-a3e9-f6b746d6c690-kube-api-access-n99n9\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.349706 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ded4654b-7fab-4413-a3e9-f6b746d6c690","Type":"ContainerDied","Data":"c61bb3b1f04d7f77f68f661c99a4927eee17ab1301821d96a82131da1faedc05"} Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.350226 4753 scope.go:117] "RemoveContainer" containerID="91936b53f6d173ba40cc9d5d11598feb1be04f8379582af2b59ecec4b9e83a04" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.349758 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.387418 4753 scope.go:117] "RemoveContainer" containerID="2f36662f0424c142c0ba053a8a9f4261cf493984be35ff3f6b9a3900e618e79e" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.399479 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.415637 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.453816 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 17:36:38 crc kubenswrapper[4753]: E1205 17:36:38.454359 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-log" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.454379 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-log" Dec 05 17:36:38 crc kubenswrapper[4753]: E1205 17:36:38.454427 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-api" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.454436 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-api" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.454647 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-api" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.454661 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" containerName="nova-api-log" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.455912 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.461644 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.477085 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.643977 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-config-data\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.644196 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.644420 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6wl6\" (UniqueName: \"kubernetes.io/projected/749effd3-8555-4349-8d88-c17817648b03-kube-api-access-s6wl6\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.644484 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/749effd3-8555-4349-8d88-c17817648b03-logs\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.747033 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.747175 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6wl6\" (UniqueName: \"kubernetes.io/projected/749effd3-8555-4349-8d88-c17817648b03-kube-api-access-s6wl6\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.747226 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/749effd3-8555-4349-8d88-c17817648b03-logs\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.747629 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-config-data\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.748575 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/749effd3-8555-4349-8d88-c17817648b03-logs\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " 
pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.762196 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.773632 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-config-data\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.787666 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6wl6\" (UniqueName: \"kubernetes.io/projected/749effd3-8555-4349-8d88-c17817648b03-kube-api-access-s6wl6\") pod \"nova-api-0\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " pod="openstack/nova-api-0" Dec 05 17:36:38 crc kubenswrapper[4753]: I1205 17:36:38.788417 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:36:39 crc kubenswrapper[4753]: W1205 17:36:39.336929 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod749effd3_8555_4349_8d88_c17817648b03.slice/crio-6fe8e57ffbc5b90a89ec3133fa8886fc29df5260c2b2b8586d4dd5ad61dd3c0c WatchSource:0}: Error finding container 6fe8e57ffbc5b90a89ec3133fa8886fc29df5260c2b2b8586d4dd5ad61dd3c0c: Status 404 returned error can't find the container with id 6fe8e57ffbc5b90a89ec3133fa8886fc29df5260c2b2b8586d4dd5ad61dd3c0c Dec 05 17:36:39 crc kubenswrapper[4753]: I1205 17:36:39.338329 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:36:39 crc kubenswrapper[4753]: I1205 17:36:39.388754 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"749effd3-8555-4349-8d88-c17817648b03","Type":"ContainerStarted","Data":"6fe8e57ffbc5b90a89ec3133fa8886fc29df5260c2b2b8586d4dd5ad61dd3c0c"} Dec 05 17:36:39 crc kubenswrapper[4753]: I1205 17:36:39.669584 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 17:36:39 crc kubenswrapper[4753]: I1205 17:36:39.669933 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 17:36:39 crc kubenswrapper[4753]: I1205 17:36:39.735261 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ded4654b-7fab-4413-a3e9-f6b746d6c690" path="/var/lib/kubelet/pods/ded4654b-7fab-4413-a3e9-f6b746d6c690/volumes" Dec 05 17:36:40 crc kubenswrapper[4753]: I1205 17:36:40.414572 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"749effd3-8555-4349-8d88-c17817648b03","Type":"ContainerStarted","Data":"62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24"} Dec 05 17:36:40 crc kubenswrapper[4753]: I1205 17:36:40.414976 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"749effd3-8555-4349-8d88-c17817648b03","Type":"ContainerStarted","Data":"1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a"} Dec 05 17:36:40 crc kubenswrapper[4753]: I1205 17:36:40.444316 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-api-0" podStartSLOduration=2.444283959 podStartE2EDuration="2.444283959s" podCreationTimestamp="2025-12-05 17:36:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:40.441264283 +0000 UTC m=+1938.944371349" watchObservedRunningTime="2025-12-05 17:36:40.444283959 +0000 UTC m=+1938.947391005" Dec 05 17:36:40 crc kubenswrapper[4753]: I1205 17:36:40.695653 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 17:36:43 crc kubenswrapper[4753]: I1205 17:36:43.681461 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 17:36:44 crc kubenswrapper[4753]: I1205 17:36:44.669842 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 17:36:44 crc kubenswrapper[4753]: I1205 17:36:44.670229 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 17:36:44 crc kubenswrapper[4753]: I1205 17:36:44.721204 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:36:44 crc kubenswrapper[4753]: E1205 17:36:44.721579 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:36:45 crc kubenswrapper[4753]: I1205 17:36:45.683369 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.220:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 17:36:45 crc kubenswrapper[4753]: I1205 17:36:45.683367 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.220:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 17:36:45 crc kubenswrapper[4753]: I1205 17:36:45.696915 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 17:36:45 crc kubenswrapper[4753]: I1205 17:36:45.765030 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 17:36:46 crc kubenswrapper[4753]: I1205 17:36:46.539314 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 17:36:48 crc kubenswrapper[4753]: I1205 17:36:48.188187 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 17:36:48 crc kubenswrapper[4753]: I1205 17:36:48.789103 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:36:48 crc kubenswrapper[4753]: I1205 17:36:48.789192 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:36:49 crc 
kubenswrapper[4753]: I1205 17:36:49.872450 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.222:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 17:36:49 crc kubenswrapper[4753]: I1205 17:36:49.872458 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.222:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 17:36:52 crc kubenswrapper[4753]: I1205 17:36:52.726981 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:36:52 crc kubenswrapper[4753]: I1205 17:36:52.727689 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="28078f95-1316-46c1-9dda-9912561aa4e4" containerName="kube-state-metrics" containerID="cri-o://2a0d5dd008371247a1e90fee309ca09991d490d0b2928249372bd98c5bb0f9fa" gracePeriod=30 Dec 05 17:36:53 crc kubenswrapper[4753]: I1205 17:36:53.591613 4753 generic.go:334] "Generic (PLEG): container finished" podID="28078f95-1316-46c1-9dda-9912561aa4e4" containerID="2a0d5dd008371247a1e90fee309ca09991d490d0b2928249372bd98c5bb0f9fa" exitCode=2 Dec 05 17:36:53 crc kubenswrapper[4753]: I1205 17:36:53.591779 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28078f95-1316-46c1-9dda-9912561aa4e4","Type":"ContainerDied","Data":"2a0d5dd008371247a1e90fee309ca09991d490d0b2928249372bd98c5bb0f9fa"} Dec 05 17:36:53 crc kubenswrapper[4753]: I1205 17:36:53.929512 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.097625 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2vzc\" (UniqueName: \"kubernetes.io/projected/28078f95-1316-46c1-9dda-9912561aa4e4-kube-api-access-q2vzc\") pod \"28078f95-1316-46c1-9dda-9912561aa4e4\" (UID: \"28078f95-1316-46c1-9dda-9912561aa4e4\") " Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.106092 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28078f95-1316-46c1-9dda-9912561aa4e4-kube-api-access-q2vzc" (OuterVolumeSpecName: "kube-api-access-q2vzc") pod "28078f95-1316-46c1-9dda-9912561aa4e4" (UID: "28078f95-1316-46c1-9dda-9912561aa4e4"). InnerVolumeSpecName "kube-api-access-q2vzc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.200226 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2vzc\" (UniqueName: \"kubernetes.io/projected/28078f95-1316-46c1-9dda-9912561aa4e4-kube-api-access-q2vzc\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.606449 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"28078f95-1316-46c1-9dda-9912561aa4e4","Type":"ContainerDied","Data":"128409f6754423fcd428f2bce32cd77e047aa65cdd30460e786ce9f76528ddd2"} Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.606502 4753 scope.go:117] "RemoveContainer" containerID="2a0d5dd008371247a1e90fee309ca09991d490d0b2928249372bd98c5bb0f9fa" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.606718 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.670333 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.685389 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.690246 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.697568 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:36:54 crc kubenswrapper[4753]: E1205 17:36:54.698086 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28078f95-1316-46c1-9dda-9912561aa4e4" containerName="kube-state-metrics" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.698098 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="28078f95-1316-46c1-9dda-9912561aa4e4" containerName="kube-state-metrics" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.698724 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="28078f95-1316-46c1-9dda-9912561aa4e4" containerName="kube-state-metrics" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.699580 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.710607 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.723565 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.723665 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.724073 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.730967 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.827323 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.827389 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.827483 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.827532 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp84w\" (UniqueName: \"kubernetes.io/projected/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-kube-api-access-qp84w\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.875769 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.876052 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="ceilometer-central-agent" containerID="cri-o://cd1a97ef33bbfa59eae87dbb1a186e797c7a2f5c8003981a5a799d327e2dc85a" gracePeriod=30 Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.876110 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="ceilometer-notification-agent" containerID="cri-o://f8d8c6b51e78b11df564815a8800e8f02479847d968808269980faba71113b94" gracePeriod=30 Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.876110 4753 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="proxy-httpd" containerID="cri-o://368ddfd5ece7e3ca53b5af287ae2501e74ffc1b7945c30fac3f67b1d3437c325" gracePeriod=30 Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.876120 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="sg-core" containerID="cri-o://66cf734570f26d4816f153692dac7b97d2bb5769dfd7e28d005ee6a7e6c8d96b" gracePeriod=30 Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.929675 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.929747 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.929824 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.929859 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp84w\" (UniqueName: \"kubernetes.io/projected/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-kube-api-access-qp84w\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.935206 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.935721 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.936387 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:54 crc kubenswrapper[4753]: I1205 17:36:54.948867 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp84w\" (UniqueName: \"kubernetes.io/projected/37b1ed0b-5977-4294-b5c4-0d9d0abd6520-kube-api-access-qp84w\") pod \"kube-state-metrics-0\" 
(UID: \"37b1ed0b-5977-4294-b5c4-0d9d0abd6520\") " pod="openstack/kube-state-metrics-0" Dec 05 17:36:55 crc kubenswrapper[4753]: I1205 17:36:55.049467 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 17:36:55 crc kubenswrapper[4753]: I1205 17:36:55.620004 4753 generic.go:334] "Generic (PLEG): container finished" podID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerID="368ddfd5ece7e3ca53b5af287ae2501e74ffc1b7945c30fac3f67b1d3437c325" exitCode=0 Dec 05 17:36:55 crc kubenswrapper[4753]: I1205 17:36:55.620251 4753 generic.go:334] "Generic (PLEG): container finished" podID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerID="66cf734570f26d4816f153692dac7b97d2bb5769dfd7e28d005ee6a7e6c8d96b" exitCode=2 Dec 05 17:36:55 crc kubenswrapper[4753]: I1205 17:36:55.620260 4753 generic.go:334] "Generic (PLEG): container finished" podID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerID="cd1a97ef33bbfa59eae87dbb1a186e797c7a2f5c8003981a5a799d327e2dc85a" exitCode=0 Dec 05 17:36:55 crc kubenswrapper[4753]: I1205 17:36:55.620494 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerDied","Data":"368ddfd5ece7e3ca53b5af287ae2501e74ffc1b7945c30fac3f67b1d3437c325"} Dec 05 17:36:55 crc kubenswrapper[4753]: I1205 17:36:55.620530 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerDied","Data":"66cf734570f26d4816f153692dac7b97d2bb5769dfd7e28d005ee6a7e6c8d96b"} Dec 05 17:36:55 crc kubenswrapper[4753]: I1205 17:36:55.620543 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerDied","Data":"cd1a97ef33bbfa59eae87dbb1a186e797c7a2f5c8003981a5a799d327e2dc85a"} Dec 05 17:36:55 crc kubenswrapper[4753]: I1205 17:36:55.631217 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 17:36:55 crc kubenswrapper[4753]: I1205 17:36:55.741783 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28078f95-1316-46c1-9dda-9912561aa4e4" path="/var/lib/kubelet/pods/28078f95-1316-46c1-9dda-9912561aa4e4/volumes" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.233233 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.580288 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.630414 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"37b1ed0b-5977-4294-b5c4-0d9d0abd6520","Type":"ContainerStarted","Data":"c1989809c1a76bfe37e3734ea3d6d3758c63a3b8499d9045c2481a802f6b1f48"} Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.632845 4753 generic.go:334] "Generic (PLEG): container finished" podID="28c73692-1325-4815-83ed-e0ae3aec6901" containerID="7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12" exitCode=137 Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.632888 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"28c73692-1325-4815-83ed-e0ae3aec6901","Type":"ContainerDied","Data":"7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12"} Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.632923 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"28c73692-1325-4815-83ed-e0ae3aec6901","Type":"ContainerDied","Data":"ced7d7db7ba114f8821e6a4e5a1d17aa84e67c1f83d578309d9ea13425a9d426"} Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.632975 4753 scope.go:117] "RemoveContainer" containerID="7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.633015 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.670797 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-combined-ca-bundle\") pod \"28c73692-1325-4815-83ed-e0ae3aec6901\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.671902 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9fpfw\" (UniqueName: \"kubernetes.io/projected/28c73692-1325-4815-83ed-e0ae3aec6901-kube-api-access-9fpfw\") pod \"28c73692-1325-4815-83ed-e0ae3aec6901\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.672051 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-config-data\") pod \"28c73692-1325-4815-83ed-e0ae3aec6901\" (UID: \"28c73692-1325-4815-83ed-e0ae3aec6901\") " Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.676800 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28c73692-1325-4815-83ed-e0ae3aec6901-kube-api-access-9fpfw" (OuterVolumeSpecName: "kube-api-access-9fpfw") pod "28c73692-1325-4815-83ed-e0ae3aec6901" (UID: "28c73692-1325-4815-83ed-e0ae3aec6901"). InnerVolumeSpecName "kube-api-access-9fpfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.701451 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "28c73692-1325-4815-83ed-e0ae3aec6901" (UID: "28c73692-1325-4815-83ed-e0ae3aec6901"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.705630 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-config-data" (OuterVolumeSpecName: "config-data") pod "28c73692-1325-4815-83ed-e0ae3aec6901" (UID: "28c73692-1325-4815-83ed-e0ae3aec6901"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.714757 4753 scope.go:117] "RemoveContainer" containerID="7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12" Dec 05 17:36:56 crc kubenswrapper[4753]: E1205 17:36:56.715244 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12\": container with ID starting with 7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12 not found: ID does not exist" containerID="7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.715300 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12"} err="failed to get container status \"7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12\": rpc error: code = NotFound desc = could not find container \"7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12\": container with ID starting with 7d4f0a458b9c0687936b01c6787314109f1ba22173f7fa7f4076977691bc3f12 not found: ID does not exist" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.775020 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9fpfw\" (UniqueName: \"kubernetes.io/projected/28c73692-1325-4815-83ed-e0ae3aec6901-kube-api-access-9fpfw\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.775052 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.775068 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/28c73692-1325-4815-83ed-e0ae3aec6901-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.972354 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:36:56 crc kubenswrapper[4753]: I1205 17:36:56.986970 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.000469 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:36:57 crc kubenswrapper[4753]: E1205 17:36:57.000924 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c73692-1325-4815-83ed-e0ae3aec6901" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.000941 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c73692-1325-4815-83ed-e0ae3aec6901" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.001197 4753 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="28c73692-1325-4815-83ed-e0ae3aec6901" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.002046 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.005396 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.005597 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.005753 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.010612 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.182338 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.182423 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.182454 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.182520 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbtpq\" (UniqueName: \"kubernetes.io/projected/059b378d-55a3-4652-96ac-804b19815e8d-kube-api-access-lbtpq\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.182622 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.284893 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbtpq\" (UniqueName: \"kubernetes.io/projected/059b378d-55a3-4652-96ac-804b19815e8d-kube-api-access-lbtpq\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.285063 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.285258 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.285371 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.285420 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.291742 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.292035 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.293078 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.294013 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/059b378d-55a3-4652-96ac-804b19815e8d-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.309492 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbtpq\" (UniqueName: \"kubernetes.io/projected/059b378d-55a3-4652-96ac-804b19815e8d-kube-api-access-lbtpq\") pod \"nova-cell1-novncproxy-0\" (UID: \"059b378d-55a3-4652-96ac-804b19815e8d\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.347209 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.645471 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"37b1ed0b-5977-4294-b5c4-0d9d0abd6520","Type":"ContainerStarted","Data":"a251675216c3599ac97587f4518cabace37357ee214b1c552d54621416d0b676"} Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.672771 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.3015147320000002 podStartE2EDuration="3.672535136s" podCreationTimestamp="2025-12-05 17:36:54 +0000 UTC" firstStartedPulling="2025-12-05 17:36:56.259480636 +0000 UTC m=+1954.762587652" lastFinishedPulling="2025-12-05 17:36:56.63050106 +0000 UTC m=+1955.133608056" observedRunningTime="2025-12-05 17:36:57.667935565 +0000 UTC m=+1956.171042611" watchObservedRunningTime="2025-12-05 17:36:57.672535136 +0000 UTC m=+1956.175642152" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.721528 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:36:57 crc kubenswrapper[4753]: E1205 17:36:57.721772 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.736005 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28c73692-1325-4815-83ed-e0ae3aec6901" path="/var/lib/kubelet/pods/28c73692-1325-4815-83ed-e0ae3aec6901/volumes" Dec 05 17:36:57 crc kubenswrapper[4753]: I1205 17:36:57.837053 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.657743 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"059b378d-55a3-4652-96ac-804b19815e8d","Type":"ContainerStarted","Data":"6f5aa4c48f8737f489111b76b9918b1b2abbfc3f4cfc3e2321ceaca2a66496a6"} Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.658347 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"059b378d-55a3-4652-96ac-804b19815e8d","Type":"ContainerStarted","Data":"a2499cb5e545ce7c6671b63b2f0f2e618d2fa3566a296396f3f8579528b02baa"} Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.664320 4753 generic.go:334] "Generic (PLEG): container finished" podID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerID="f8d8c6b51e78b11df564815a8800e8f02479847d968808269980faba71113b94" exitCode=0 Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.664422 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerDied","Data":"f8d8c6b51e78b11df564815a8800e8f02479847d968808269980faba71113b94"} Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.664499 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cdd3659b-07fa-436e-93f0-b6b55fcead58","Type":"ContainerDied","Data":"93fa8d6df86e17a8f964479ad62ba06fe38d042eacee5141079308a9c19c5a69"} Dec 05 
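17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.664513 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93fa8d6df86e17a8f964479ad62ba06fe38d042eacee5141079308a9c19c5a69"

[editor's note] In the "Observed pod startup duration" entry for openstack/kube-state-metrics-0 above, podStartSLOduration is the end-to-end startup time minus the image-pull window; the monotonic m=+... offsets printed in that same entry bear this out. A quick check of the arithmetic, with the values copied from the log line:

```go
// Verifies the kube-state-metrics-0 numbers from the log entry above:
// SLO duration = podStartE2EDuration - (lastFinishedPulling - firstStartedPulling),
// computed from the monotonic m=+... offsets rather than the wall-clock times.
package main

import "fmt"

func main() {
	e2e := 3.672535136                      // podStartE2EDuration, seconds
	pull := 1955.133608056 - 1954.762587652 // image-pull window from the m=+ offsets
	fmt.Printf("podStartSLOduration = %.9f s\n", e2e-pull)
	// prints 3.301514732, matching the logged podStartSLOduration=3.3015147320000002
}
```
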
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.664604 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.686916 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.6868976079999998 podStartE2EDuration="2.686897608s" podCreationTimestamp="2025-12-05 17:36:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:58.674901819 +0000 UTC m=+1957.178008825" watchObservedRunningTime="2025-12-05 17:36:58.686897608 +0000 UTC m=+1957.190004614"
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.701201 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.793459 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.793842 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.795287 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.797501 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.830192 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-config-data\") pod \"cdd3659b-07fa-436e-93f0-b6b55fcead58\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") "
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.830414 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-log-httpd\") pod \"cdd3659b-07fa-436e-93f0-b6b55fcead58\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") "
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.830542 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-sg-core-conf-yaml\") pod \"cdd3659b-07fa-436e-93f0-b6b55fcead58\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") "
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.830585 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-run-httpd\") pod \"cdd3659b-07fa-436e-93f0-b6b55fcead58\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") "
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.830615 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-scripts\") pod \"cdd3659b-07fa-436e-93f0-b6b55fcead58\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") "
Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 
17:36:58.830763 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dpf8j\" (UniqueName: \"kubernetes.io/projected/cdd3659b-07fa-436e-93f0-b6b55fcead58-kube-api-access-dpf8j\") pod \"cdd3659b-07fa-436e-93f0-b6b55fcead58\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.830793 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-combined-ca-bundle\") pod \"cdd3659b-07fa-436e-93f0-b6b55fcead58\" (UID: \"cdd3659b-07fa-436e-93f0-b6b55fcead58\") " Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.831068 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cdd3659b-07fa-436e-93f0-b6b55fcead58" (UID: "cdd3659b-07fa-436e-93f0-b6b55fcead58"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.831349 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cdd3659b-07fa-436e-93f0-b6b55fcead58" (UID: "cdd3659b-07fa-436e-93f0-b6b55fcead58"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.831794 4753 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.831820 4753 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cdd3659b-07fa-436e-93f0-b6b55fcead58-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.842410 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdd3659b-07fa-436e-93f0-b6b55fcead58-kube-api-access-dpf8j" (OuterVolumeSpecName: "kube-api-access-dpf8j") pod "cdd3659b-07fa-436e-93f0-b6b55fcead58" (UID: "cdd3659b-07fa-436e-93f0-b6b55fcead58"). InnerVolumeSpecName "kube-api-access-dpf8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.850462 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-scripts" (OuterVolumeSpecName: "scripts") pod "cdd3659b-07fa-436e-93f0-b6b55fcead58" (UID: "cdd3659b-07fa-436e-93f0-b6b55fcead58"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.872252 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cdd3659b-07fa-436e-93f0-b6b55fcead58" (UID: "cdd3659b-07fa-436e-93f0-b6b55fcead58"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.926932 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cdd3659b-07fa-436e-93f0-b6b55fcead58" (UID: "cdd3659b-07fa-436e-93f0-b6b55fcead58"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.933539 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.933567 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dpf8j\" (UniqueName: \"kubernetes.io/projected/cdd3659b-07fa-436e-93f0-b6b55fcead58-kube-api-access-dpf8j\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.933580 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.933592 4753 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:58 crc kubenswrapper[4753]: I1205 17:36:58.967936 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-config-data" (OuterVolumeSpecName: "config-data") pod "cdd3659b-07fa-436e-93f0-b6b55fcead58" (UID: "cdd3659b-07fa-436e-93f0-b6b55fcead58"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.036033 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cdd3659b-07fa-436e-93f0-b6b55fcead58-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.673985 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.675562 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.680101 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.754843 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.756866 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.778708 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:59 crc kubenswrapper[4753]: E1205 17:36:59.779236 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="ceilometer-central-agent" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.779257 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="ceilometer-central-agent" Dec 05 17:36:59 crc kubenswrapper[4753]: E1205 17:36:59.779275 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="ceilometer-notification-agent" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.779283 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="ceilometer-notification-agent" Dec 05 17:36:59 crc kubenswrapper[4753]: E1205 17:36:59.779311 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="proxy-httpd" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.779320 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="proxy-httpd" Dec 05 17:36:59 crc kubenswrapper[4753]: E1205 17:36:59.779335 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="sg-core" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.779344 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="sg-core" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.779603 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="ceilometer-notification-agent" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.779633 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="sg-core" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.779648 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="proxy-httpd" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.779667 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" containerName="ceilometer-central-agent" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.782109 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.788388 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.789598 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.789495 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.815230 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.909131 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78468d7767-zhgg5"] Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.911533 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.963040 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.963097 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-log-httpd\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.963378 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-scripts\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.963526 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.963610 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-config-data\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.963787 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-run-httpd\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.963844 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-ceilometer-tls-certs\") 
pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.963990 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cc8f\" (UniqueName: \"kubernetes.io/projected/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-kube-api-access-8cc8f\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:36:59 crc kubenswrapper[4753]: I1205 17:36:59.967701 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-zhgg5"] Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.066347 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cc8f\" (UniqueName: \"kubernetes.io/projected/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-kube-api-access-8cc8f\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.066402 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-config\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.066428 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s9hr4\" (UniqueName: \"kubernetes.io/projected/79649179-aa76-48b8-ab8d-786db621db69-kube-api-access-s9hr4\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.066650 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-nb\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.066721 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-svc\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.066781 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.066806 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-log-httpd\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.066946 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-scripts\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.066986 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.067035 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-config-data\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.067117 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-sb\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.067175 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-swift-storage-0\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.067225 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-run-httpd\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.067300 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-log-httpd\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.067445 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.067700 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-run-httpd\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.070841 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-config-data\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.071288 4753 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-scripts\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.071434 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.072553 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.083221 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.095978 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cc8f\" (UniqueName: \"kubernetes.io/projected/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-kube-api-access-8cc8f\") pod \"ceilometer-0\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.142076 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.170879 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-sb\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.170933 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-swift-storage-0\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.171000 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-config\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.171019 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s9hr4\" (UniqueName: \"kubernetes.io/projected/79649179-aa76-48b8-ab8d-786db621db69-kube-api-access-s9hr4\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.171075 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-nb\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.171093 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-svc\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.172083 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-svc\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.172274 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-config\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.172273 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-sb\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.172660 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-swift-storage-0\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.172853 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-nb\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.187993 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s9hr4\" (UniqueName: \"kubernetes.io/projected/79649179-aa76-48b8-ab8d-786db621db69-kube-api-access-s9hr4\") pod \"dnsmasq-dns-78468d7767-zhgg5\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.271861 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.658406 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:37:00 crc kubenswrapper[4753]: W1205 17:37:00.680025 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod09bdaaf1_9ac6_4a17_a9b7_ba6e6a63bf50.slice/crio-1d0fab62ceb8f422cdcb5065fb2c85fc2cdcd1372de6ed12cf4eabbcd0dc7ae0 WatchSource:0}: Error finding container 1d0fab62ceb8f422cdcb5065fb2c85fc2cdcd1372de6ed12cf4eabbcd0dc7ae0: Status 404 returned error can't find the container with id 1d0fab62ceb8f422cdcb5065fb2c85fc2cdcd1372de6ed12cf4eabbcd0dc7ae0 Dec 05 17:37:00 crc kubenswrapper[4753]: I1205 17:37:00.791270 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-zhgg5"] Dec 05 17:37:00 crc kubenswrapper[4753]: W1205 17:37:00.799662 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod79649179_aa76_48b8_ab8d_786db621db69.slice/crio-fe17b64d05c533329bada400ed5b45fbae229a2f4778bf2b1fde1ea9f83272ff WatchSource:0}: Error finding container fe17b64d05c533329bada400ed5b45fbae229a2f4778bf2b1fde1ea9f83272ff: Status 404 returned error can't find the container with id fe17b64d05c533329bada400ed5b45fbae229a2f4778bf2b1fde1ea9f83272ff Dec 05 17:37:01 crc kubenswrapper[4753]: I1205 17:37:01.704594 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerStarted","Data":"582e90ba16b9dab7dd7e81af9f0af341e3ad61841f6c047f52921870bec0697f"} Dec 05 17:37:01 crc kubenswrapper[4753]: I1205 17:37:01.705092 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerStarted","Data":"1d0fab62ceb8f422cdcb5065fb2c85fc2cdcd1372de6ed12cf4eabbcd0dc7ae0"} Dec 05 17:37:01 crc kubenswrapper[4753]: I1205 17:37:01.707295 4753 generic.go:334] "Generic (PLEG): container finished" podID="79649179-aa76-48b8-ab8d-786db621db69" containerID="cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66" exitCode=0 Dec 05 17:37:01 crc kubenswrapper[4753]: I1205 17:37:01.707331 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" event={"ID":"79649179-aa76-48b8-ab8d-786db621db69","Type":"ContainerDied","Data":"cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66"} Dec 05 17:37:01 crc kubenswrapper[4753]: I1205 17:37:01.707365 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" event={"ID":"79649179-aa76-48b8-ab8d-786db621db69","Type":"ContainerStarted","Data":"fe17b64d05c533329bada400ed5b45fbae229a2f4778bf2b1fde1ea9f83272ff"} Dec 05 17:37:01 crc kubenswrapper[4753]: I1205 17:37:01.738636 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdd3659b-07fa-436e-93f0-b6b55fcead58" path="/var/lib/kubelet/pods/cdd3659b-07fa-436e-93f0-b6b55fcead58/volumes" Dec 05 17:37:02 crc kubenswrapper[4753]: I1205 17:37:02.347708 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:37:02 crc kubenswrapper[4753]: I1205 17:37:02.727996 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" 
event={"ID":"79649179-aa76-48b8-ab8d-786db621db69","Type":"ContainerStarted","Data":"126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4"} Dec 05 17:37:02 crc kubenswrapper[4753]: I1205 17:37:02.728540 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:02 crc kubenswrapper[4753]: I1205 17:37:02.735432 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerStarted","Data":"aaad6c895ea31076713cb9ed34c08c84cd4f9ed5bc5620978dec95bc14a459bc"} Dec 05 17:37:02 crc kubenswrapper[4753]: I1205 17:37:02.762143 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" podStartSLOduration=3.7621229400000002 podStartE2EDuration="3.76212294s" podCreationTimestamp="2025-12-05 17:36:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:37:02.754031851 +0000 UTC m=+1961.257138867" watchObservedRunningTime="2025-12-05 17:37:02.76212294 +0000 UTC m=+1961.265229956" Dec 05 17:37:02 crc kubenswrapper[4753]: I1205 17:37:02.987546 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:02 crc kubenswrapper[4753]: I1205 17:37:02.987825 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-log" containerID="cri-o://1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a" gracePeriod=30 Dec 05 17:37:02 crc kubenswrapper[4753]: I1205 17:37:02.987891 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-api" containerID="cri-o://62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24" gracePeriod=30 Dec 05 17:37:03 crc kubenswrapper[4753]: I1205 17:37:03.671000 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:37:03 crc kubenswrapper[4753]: I1205 17:37:03.746613 4753 generic.go:334] "Generic (PLEG): container finished" podID="749effd3-8555-4349-8d88-c17817648b03" containerID="1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a" exitCode=143 Dec 05 17:37:03 crc kubenswrapper[4753]: I1205 17:37:03.746681 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"749effd3-8555-4349-8d88-c17817648b03","Type":"ContainerDied","Data":"1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a"} Dec 05 17:37:03 crc kubenswrapper[4753]: I1205 17:37:03.750198 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerStarted","Data":"7ec5b6a9e8ea30d60bac68561b89c727681c294f93cbebb95dceb0eb0ea4b5a9"} Dec 05 17:37:04 crc kubenswrapper[4753]: I1205 17:37:04.766559 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerStarted","Data":"af3b62f84f826fb6e4c4205740314289f2b67985daecbcf7786018bcd87d0e39"} Dec 05 17:37:04 crc kubenswrapper[4753]: I1205 17:37:04.766702 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" 
containerName="ceilometer-central-agent" containerID="cri-o://582e90ba16b9dab7dd7e81af9f0af341e3ad61841f6c047f52921870bec0697f" gracePeriod=30 Dec 05 17:37:04 crc kubenswrapper[4753]: I1205 17:37:04.766903 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="proxy-httpd" containerID="cri-o://af3b62f84f826fb6e4c4205740314289f2b67985daecbcf7786018bcd87d0e39" gracePeriod=30 Dec 05 17:37:04 crc kubenswrapper[4753]: I1205 17:37:04.766910 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="sg-core" containerID="cri-o://7ec5b6a9e8ea30d60bac68561b89c727681c294f93cbebb95dceb0eb0ea4b5a9" gracePeriod=30 Dec 05 17:37:04 crc kubenswrapper[4753]: I1205 17:37:04.766960 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="ceilometer-notification-agent" containerID="cri-o://aaad6c895ea31076713cb9ed34c08c84cd4f9ed5bc5620978dec95bc14a459bc" gracePeriod=30 Dec 05 17:37:04 crc kubenswrapper[4753]: I1205 17:37:04.767093 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:37:04 crc kubenswrapper[4753]: I1205 17:37:04.790765 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.360243337 podStartE2EDuration="5.790746501s" podCreationTimestamp="2025-12-05 17:36:59 +0000 UTC" firstStartedPulling="2025-12-05 17:37:00.685418648 +0000 UTC m=+1959.188525654" lastFinishedPulling="2025-12-05 17:37:04.115921812 +0000 UTC m=+1962.619028818" observedRunningTime="2025-12-05 17:37:04.785261025 +0000 UTC m=+1963.288368031" watchObservedRunningTime="2025-12-05 17:37:04.790746501 +0000 UTC m=+1963.293853507" Dec 05 17:37:05 crc kubenswrapper[4753]: I1205 17:37:05.063630 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 17:37:05 crc kubenswrapper[4753]: I1205 17:37:05.781128 4753 generic.go:334] "Generic (PLEG): container finished" podID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerID="af3b62f84f826fb6e4c4205740314289f2b67985daecbcf7786018bcd87d0e39" exitCode=0 Dec 05 17:37:05 crc kubenswrapper[4753]: I1205 17:37:05.781559 4753 generic.go:334] "Generic (PLEG): container finished" podID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerID="7ec5b6a9e8ea30d60bac68561b89c727681c294f93cbebb95dceb0eb0ea4b5a9" exitCode=2 Dec 05 17:37:05 crc kubenswrapper[4753]: I1205 17:37:05.781573 4753 generic.go:334] "Generic (PLEG): container finished" podID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerID="aaad6c895ea31076713cb9ed34c08c84cd4f9ed5bc5620978dec95bc14a459bc" exitCode=0 Dec 05 17:37:05 crc kubenswrapper[4753]: I1205 17:37:05.781224 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerDied","Data":"af3b62f84f826fb6e4c4205740314289f2b67985daecbcf7786018bcd87d0e39"} Dec 05 17:37:05 crc kubenswrapper[4753]: I1205 17:37:05.781636 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerDied","Data":"7ec5b6a9e8ea30d60bac68561b89c727681c294f93cbebb95dceb0eb0ea4b5a9"} Dec 05 17:37:05 crc kubenswrapper[4753]: I1205 17:37:05.781652 
4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerDied","Data":"aaad6c895ea31076713cb9ed34c08c84cd4f9ed5bc5620978dec95bc14a459bc"} Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.765816 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.797938 4753 generic.go:334] "Generic (PLEG): container finished" podID="749effd3-8555-4349-8d88-c17817648b03" containerID="62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24" exitCode=0 Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.797986 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"749effd3-8555-4349-8d88-c17817648b03","Type":"ContainerDied","Data":"62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24"} Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.798017 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"749effd3-8555-4349-8d88-c17817648b03","Type":"ContainerDied","Data":"6fe8e57ffbc5b90a89ec3133fa8886fc29df5260c2b2b8586d4dd5ad61dd3c0c"} Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.798232 4753 scope.go:117] "RemoveContainer" containerID="62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.798887 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.871941 4753 scope.go:117] "RemoveContainer" containerID="1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.910728 4753 scope.go:117] "RemoveContainer" containerID="62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24" Dec 05 17:37:06 crc kubenswrapper[4753]: E1205 17:37:06.911239 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24\": container with ID starting with 62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24 not found: ID does not exist" containerID="62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.911275 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24"} err="failed to get container status \"62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24\": rpc error: code = NotFound desc = could not find container \"62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24\": container with ID starting with 62f3f6a71c76e7680bb62a4110755c445b50ea8af3d0403788bd548027189d24 not found: ID does not exist" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.911300 4753 scope.go:117] "RemoveContainer" containerID="1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a" Dec 05 17:37:06 crc kubenswrapper[4753]: E1205 17:37:06.911503 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a\": container with ID starting with 1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a not 
found: ID does not exist" containerID="1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.911527 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a"} err="failed to get container status \"1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a\": rpc error: code = NotFound desc = could not find container \"1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a\": container with ID starting with 1c4fe30f3fe65a56accfa564dbbbf7813bfb66ab0282e0dc77e06f3a1b69682a not found: ID does not exist" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.959521 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6wl6\" (UniqueName: \"kubernetes.io/projected/749effd3-8555-4349-8d88-c17817648b03-kube-api-access-s6wl6\") pod \"749effd3-8555-4349-8d88-c17817648b03\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.959627 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/749effd3-8555-4349-8d88-c17817648b03-logs\") pod \"749effd3-8555-4349-8d88-c17817648b03\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.959788 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-combined-ca-bundle\") pod \"749effd3-8555-4349-8d88-c17817648b03\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.959868 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-config-data\") pod \"749effd3-8555-4349-8d88-c17817648b03\" (UID: \"749effd3-8555-4349-8d88-c17817648b03\") " Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.963395 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/749effd3-8555-4349-8d88-c17817648b03-logs" (OuterVolumeSpecName: "logs") pod "749effd3-8555-4349-8d88-c17817648b03" (UID: "749effd3-8555-4349-8d88-c17817648b03"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.969468 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/749effd3-8555-4349-8d88-c17817648b03-kube-api-access-s6wl6" (OuterVolumeSpecName: "kube-api-access-s6wl6") pod "749effd3-8555-4349-8d88-c17817648b03" (UID: "749effd3-8555-4349-8d88-c17817648b03"). InnerVolumeSpecName "kube-api-access-s6wl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.991813 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-config-data" (OuterVolumeSpecName: "config-data") pod "749effd3-8555-4349-8d88-c17817648b03" (UID: "749effd3-8555-4349-8d88-c17817648b03"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:06 crc kubenswrapper[4753]: I1205 17:37:06.998832 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "749effd3-8555-4349-8d88-c17817648b03" (UID: "749effd3-8555-4349-8d88-c17817648b03"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.062114 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.062194 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/749effd3-8555-4349-8d88-c17817648b03-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.062224 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6wl6\" (UniqueName: \"kubernetes.io/projected/749effd3-8555-4349-8d88-c17817648b03-kube-api-access-s6wl6\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.062244 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/749effd3-8555-4349-8d88-c17817648b03-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.173912 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.184379 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.199587 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:07 crc kubenswrapper[4753]: E1205 17:37:07.200341 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-api" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.200369 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-api" Dec 05 17:37:07 crc kubenswrapper[4753]: E1205 17:37:07.200429 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-log" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.200439 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-log" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.200687 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-api" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.200730 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="749effd3-8555-4349-8d88-c17817648b03" containerName="nova-api-log" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.202384 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.204611 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.204919 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.205423 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.209727 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.347922 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.369084 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-public-tls-certs\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.369224 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7fsp\" (UniqueName: \"kubernetes.io/projected/3163f6c9-2070-4787-9b8b-d89d2d29a27c-kube-api-access-t7fsp\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.369295 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.369521 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.369734 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-config-data\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.369931 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3163f6c9-2070-4787-9b8b-d89d2d29a27c-logs\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.385108 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.472405 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3163f6c9-2070-4787-9b8b-d89d2d29a27c-logs\") pod \"nova-api-0\" (UID: 
\"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.472508 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-public-tls-certs\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.472544 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7fsp\" (UniqueName: \"kubernetes.io/projected/3163f6c9-2070-4787-9b8b-d89d2d29a27c-kube-api-access-t7fsp\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.472581 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.472689 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.472779 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-config-data\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.472840 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3163f6c9-2070-4787-9b8b-d89d2d29a27c-logs\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.477723 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-public-tls-certs\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.477876 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-config-data\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.478004 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.479289 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " 
pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.493839 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7fsp\" (UniqueName: \"kubernetes.io/projected/3163f6c9-2070-4787-9b8b-d89d2d29a27c-kube-api-access-t7fsp\") pod \"nova-api-0\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.520399 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.736470 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="749effd3-8555-4349-8d88-c17817648b03" path="/var/lib/kubelet/pods/749effd3-8555-4349-8d88-c17817648b03/volumes" Dec 05 17:37:07 crc kubenswrapper[4753]: I1205 17:37:07.845742 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.007814 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-2wp87"] Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.009444 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.012493 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.012764 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.019630 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2wp87"] Dec 05 17:37:08 crc kubenswrapper[4753]: W1205 17:37:08.105062 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3163f6c9_2070_4787_9b8b_d89d2d29a27c.slice/crio-1cead7f63f5b8a25ec8eb804903af3e9af70aa5a9c8c7d738213371e397fc118 WatchSource:0}: Error finding container 1cead7f63f5b8a25ec8eb804903af3e9af70aa5a9c8c7d738213371e397fc118: Status 404 returned error can't find the container with id 1cead7f63f5b8a25ec8eb804903af3e9af70aa5a9c8c7d738213371e397fc118 Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.111789 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.197533 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4thcv\" (UniqueName: \"kubernetes.io/projected/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-kube-api-access-4thcv\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.197916 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-scripts\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.198114 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-config-data\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.198320 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.300492 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.300580 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4thcv\" (UniqueName: \"kubernetes.io/projected/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-kube-api-access-4thcv\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.300605 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-scripts\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.300739 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-config-data\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.306959 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-scripts\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.307041 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-config-data\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.307961 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.324545 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4thcv\" (UniqueName: 
\"kubernetes.io/projected/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-kube-api-access-4thcv\") pod \"nova-cell1-cell-mapping-2wp87\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.357820 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.838843 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3163f6c9-2070-4787-9b8b-d89d2d29a27c","Type":"ContainerStarted","Data":"1cfc1cf52ac40ee3d8b792343ac268277c29047046b61adb1a664a8eaa1dbcbf"} Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.839109 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3163f6c9-2070-4787-9b8b-d89d2d29a27c","Type":"ContainerStarted","Data":"c7610e6976e797caebc6c465bac38a12718b6f31761f4266863b96ad4f6e1b46"} Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.839225 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3163f6c9-2070-4787-9b8b-d89d2d29a27c","Type":"ContainerStarted","Data":"1cead7f63f5b8a25ec8eb804903af3e9af70aa5a9c8c7d738213371e397fc118"} Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.873817 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-2wp87"] Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.885784 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.885760942 podStartE2EDuration="1.885760942s" podCreationTimestamp="2025-12-05 17:37:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:37:08.872844507 +0000 UTC m=+1967.375951543" watchObservedRunningTime="2025-12-05 17:37:08.885760942 +0000 UTC m=+1967.388867948" Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.888352 4753 generic.go:334] "Generic (PLEG): container finished" podID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerID="582e90ba16b9dab7dd7e81af9f0af341e3ad61841f6c047f52921870bec0697f" exitCode=0 Dec 05 17:37:08 crc kubenswrapper[4753]: I1205 17:37:08.888493 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerDied","Data":"582e90ba16b9dab7dd7e81af9f0af341e3ad61841f6c047f52921870bec0697f"} Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.335566 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.427013 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-log-httpd\") pod \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.427069 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-ceilometer-tls-certs\") pod \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.427138 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-sg-core-conf-yaml\") pod \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.427203 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-scripts\") pod \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.427284 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-run-httpd\") pod \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.427794 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" (UID: "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.428070 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" (UID: "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.428129 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-combined-ca-bundle\") pod \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.428198 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cc8f\" (UniqueName: \"kubernetes.io/projected/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-kube-api-access-8cc8f\") pod \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.428248 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-config-data\") pod \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\" (UID: \"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50\") " Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.428897 4753 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.428920 4753 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.444568 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-scripts" (OuterVolumeSpecName: "scripts") pod "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" (UID: "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.457088 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-kube-api-access-8cc8f" (OuterVolumeSpecName: "kube-api-access-8cc8f") pod "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" (UID: "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50"). InnerVolumeSpecName "kube-api-access-8cc8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.520817 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" (UID: "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.531237 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cc8f\" (UniqueName: \"kubernetes.io/projected/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-kube-api-access-8cc8f\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.531264 4753 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.531273 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.566174 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" (UID: "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.569090 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" (UID: "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.607746 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-config-data" (OuterVolumeSpecName: "config-data") pod "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" (UID: "09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.633622 4753 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.633655 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.633665 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.901405 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50","Type":"ContainerDied","Data":"1d0fab62ceb8f422cdcb5065fb2c85fc2cdcd1372de6ed12cf4eabbcd0dc7ae0"} Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.901469 4753 scope.go:117] "RemoveContainer" containerID="af3b62f84f826fb6e4c4205740314289f2b67985daecbcf7786018bcd87d0e39" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.901614 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.906800 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2wp87" event={"ID":"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b","Type":"ContainerStarted","Data":"e0368f1e24c0e1f2b65d96f6c4f0ce9fe6637474024a019d438ee2e8d82a1ec6"} Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.906847 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2wp87" event={"ID":"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b","Type":"ContainerStarted","Data":"02f2f7d672bb3907f1f0256d0911f845928c6111248eda8e4e8249be1389dbfc"} Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.938398 4753 scope.go:117] "RemoveContainer" containerID="7ec5b6a9e8ea30d60bac68561b89c727681c294f93cbebb95dceb0eb0ea4b5a9" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.943727 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-2wp87" podStartSLOduration=2.943710667 podStartE2EDuration="2.943710667s" podCreationTimestamp="2025-12-05 17:37:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:37:09.927579441 +0000 UTC m=+1968.430686437" watchObservedRunningTime="2025-12-05 17:37:09.943710667 +0000 UTC m=+1968.446817673" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.953475 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.962693 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.963322 4753 scope.go:117] "RemoveContainer" containerID="aaad6c895ea31076713cb9ed34c08c84cd4f9ed5bc5620978dec95bc14a459bc" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.985867 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:37:09 crc kubenswrapper[4753]: 
E1205 17:37:09.986448 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="sg-core" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.986475 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="sg-core" Dec 05 17:37:09 crc kubenswrapper[4753]: E1205 17:37:09.986497 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="proxy-httpd" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.986507 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="proxy-httpd" Dec 05 17:37:09 crc kubenswrapper[4753]: E1205 17:37:09.986566 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="ceilometer-central-agent" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.986577 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="ceilometer-central-agent" Dec 05 17:37:09 crc kubenswrapper[4753]: E1205 17:37:09.986587 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="ceilometer-notification-agent" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.986595 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="ceilometer-notification-agent" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.986826 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="ceilometer-central-agent" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.986856 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="ceilometer-notification-agent" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.986867 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="proxy-httpd" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.986878 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" containerName="sg-core" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.989461 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.992079 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.992318 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.993458 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:37:09 crc kubenswrapper[4753]: I1205 17:37:09.999288 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.005276 4753 scope.go:117] "RemoveContainer" containerID="582e90ba16b9dab7dd7e81af9f0af341e3ad61841f6c047f52921870bec0697f" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.148076 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.152888 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-log-httpd\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.153138 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.153244 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-scripts\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.153284 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.153372 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56fc6\" (UniqueName: \"kubernetes.io/projected/ef421b3a-61a8-4d2b-a898-caff08ee8231-kube-api-access-56fc6\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.153704 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-config-data\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: 
I1205 17:37:10.153864 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-run-httpd\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.256033 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-run-httpd\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.256089 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.256119 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-log-httpd\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.256178 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.256200 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-scripts\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.256852 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.256894 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56fc6\" (UniqueName: \"kubernetes.io/projected/ef421b3a-61a8-4d2b-a898-caff08ee8231-kube-api-access-56fc6\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.256974 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-config-data\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.256993 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-run-httpd\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 
17:37:10.257301 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-log-httpd\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.261314 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.261883 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.261972 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.264180 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-config-data\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.277036 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.288297 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-scripts\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.294984 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56fc6\" (UniqueName: \"kubernetes.io/projected/ef421b3a-61a8-4d2b-a898-caff08ee8231-kube-api-access-56fc6\") pod \"ceilometer-0\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.345708 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-pkjmb"] Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.347080 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" podUID="ab2065af-a1a7-4876-a066-37f7f01a8435" containerName="dnsmasq-dns" containerID="cri-o://86183eb8d8bdb6d521faa4ede93d1ee375c0da43d4b9c858f5b0c60eb01727f4" gracePeriod=10 Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.353350 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.926972 4753 generic.go:334] "Generic (PLEG): container finished" podID="ab2065af-a1a7-4876-a066-37f7f01a8435" containerID="86183eb8d8bdb6d521faa4ede93d1ee375c0da43d4b9c858f5b0c60eb01727f4" exitCode=0 Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.927371 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" event={"ID":"ab2065af-a1a7-4876-a066-37f7f01a8435","Type":"ContainerDied","Data":"86183eb8d8bdb6d521faa4ede93d1ee375c0da43d4b9c858f5b0c60eb01727f4"} Dec 05 17:37:10 crc kubenswrapper[4753]: I1205 17:37:10.955726 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.166036 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.284447 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-svc\") pod \"ab2065af-a1a7-4876-a066-37f7f01a8435\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.284809 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-config\") pod \"ab2065af-a1a7-4876-a066-37f7f01a8435\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.284839 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-nb\") pod \"ab2065af-a1a7-4876-a066-37f7f01a8435\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.284942 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ck67s\" (UniqueName: \"kubernetes.io/projected/ab2065af-a1a7-4876-a066-37f7f01a8435-kube-api-access-ck67s\") pod \"ab2065af-a1a7-4876-a066-37f7f01a8435\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.285071 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-swift-storage-0\") pod \"ab2065af-a1a7-4876-a066-37f7f01a8435\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.285162 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-sb\") pod \"ab2065af-a1a7-4876-a066-37f7f01a8435\" (UID: \"ab2065af-a1a7-4876-a066-37f7f01a8435\") " Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.291411 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab2065af-a1a7-4876-a066-37f7f01a8435-kube-api-access-ck67s" (OuterVolumeSpecName: "kube-api-access-ck67s") pod "ab2065af-a1a7-4876-a066-37f7f01a8435" (UID: "ab2065af-a1a7-4876-a066-37f7f01a8435"). InnerVolumeSpecName "kube-api-access-ck67s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.343610 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "ab2065af-a1a7-4876-a066-37f7f01a8435" (UID: "ab2065af-a1a7-4876-a066-37f7f01a8435"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.347752 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "ab2065af-a1a7-4876-a066-37f7f01a8435" (UID: "ab2065af-a1a7-4876-a066-37f7f01a8435"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.350271 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "ab2065af-a1a7-4876-a066-37f7f01a8435" (UID: "ab2065af-a1a7-4876-a066-37f7f01a8435"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.372358 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-config" (OuterVolumeSpecName: "config") pod "ab2065af-a1a7-4876-a066-37f7f01a8435" (UID: "ab2065af-a1a7-4876-a066-37f7f01a8435"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.387524 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.387566 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.387579 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.387595 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ck67s\" (UniqueName: \"kubernetes.io/projected/ab2065af-a1a7-4876-a066-37f7f01a8435-kube-api-access-ck67s\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.387608 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.393671 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "ab2065af-a1a7-4876-a066-37f7f01a8435" (UID: "ab2065af-a1a7-4876-a066-37f7f01a8435"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.489822 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ab2065af-a1a7-4876-a066-37f7f01a8435-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.730701 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:37:11 crc kubenswrapper[4753]: E1205 17:37:11.731200 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.736547 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50" path="/var/lib/kubelet/pods/09bdaaf1-9ac6-4a17-a9b7-ba6e6a63bf50/volumes" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.942208 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" event={"ID":"ab2065af-a1a7-4876-a066-37f7f01a8435","Type":"ContainerDied","Data":"9a650a62fe14ffff8159bbae0f92005596b88c16e554d4a9d1b82e92b73b047f"} Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.942269 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7c9cb78d75-pkjmb" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.942532 4753 scope.go:117] "RemoveContainer" containerID="86183eb8d8bdb6d521faa4ede93d1ee375c0da43d4b9c858f5b0c60eb01727f4" Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.946850 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerStarted","Data":"0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082"} Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.946914 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerStarted","Data":"78e66313143301feec1d810f60b8160f9daea6634f861a9a98b6c2a0e3ba53ed"} Dec 05 17:37:11 crc kubenswrapper[4753]: I1205 17:37:11.984944 4753 scope.go:117] "RemoveContainer" containerID="33a3651c422c8e2f273387cc663e42074bd20878e632ec75569c59a75349949f" Dec 05 17:37:12 crc kubenswrapper[4753]: I1205 17:37:12.015458 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-pkjmb"] Dec 05 17:37:12 crc kubenswrapper[4753]: I1205 17:37:12.028163 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7c9cb78d75-pkjmb"] Dec 05 17:37:12 crc kubenswrapper[4753]: I1205 17:37:12.995020 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerStarted","Data":"585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25"} Dec 05 17:37:13 crc kubenswrapper[4753]: I1205 17:37:13.735059 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab2065af-a1a7-4876-a066-37f7f01a8435" 
path="/var/lib/kubelet/pods/ab2065af-a1a7-4876-a066-37f7f01a8435/volumes" Dec 05 17:37:14 crc kubenswrapper[4753]: I1205 17:37:14.008248 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerStarted","Data":"24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d"} Dec 05 17:37:15 crc kubenswrapper[4753]: I1205 17:37:15.022509 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerStarted","Data":"baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9"} Dec 05 17:37:15 crc kubenswrapper[4753]: I1205 17:37:15.024023 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:37:15 crc kubenswrapper[4753]: I1205 17:37:15.058561 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.777575049 podStartE2EDuration="6.058529535s" podCreationTimestamp="2025-12-05 17:37:09 +0000 UTC" firstStartedPulling="2025-12-05 17:37:10.956402501 +0000 UTC m=+1969.459509507" lastFinishedPulling="2025-12-05 17:37:14.237356987 +0000 UTC m=+1972.740463993" observedRunningTime="2025-12-05 17:37:15.039454135 +0000 UTC m=+1973.542561161" watchObservedRunningTime="2025-12-05 17:37:15.058529535 +0000 UTC m=+1973.561636571" Dec 05 17:37:16 crc kubenswrapper[4753]: I1205 17:37:16.041978 4753 generic.go:334] "Generic (PLEG): container finished" podID="4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b" containerID="e0368f1e24c0e1f2b65d96f6c4f0ce9fe6637474024a019d438ee2e8d82a1ec6" exitCode=0 Dec 05 17:37:16 crc kubenswrapper[4753]: I1205 17:37:16.042057 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2wp87" event={"ID":"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b","Type":"ContainerDied","Data":"e0368f1e24c0e1f2b65d96f6c4f0ce9fe6637474024a019d438ee2e8d82a1ec6"} Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.522293 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.522792 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.541375 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.636337 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-scripts\") pod \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.636462 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-config-data\") pod \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.636765 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4thcv\" (UniqueName: \"kubernetes.io/projected/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-kube-api-access-4thcv\") pod \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.636815 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-combined-ca-bundle\") pod \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\" (UID: \"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b\") " Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.642664 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-kube-api-access-4thcv" (OuterVolumeSpecName: "kube-api-access-4thcv") pod "4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b" (UID: "4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b"). InnerVolumeSpecName "kube-api-access-4thcv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.644443 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-scripts" (OuterVolumeSpecName: "scripts") pod "4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b" (UID: "4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.689433 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-config-data" (OuterVolumeSpecName: "config-data") pod "4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b" (UID: "4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.712298 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b" (UID: "4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.741733 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4thcv\" (UniqueName: \"kubernetes.io/projected/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-kube-api-access-4thcv\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.741765 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.741779 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:17 crc kubenswrapper[4753]: I1205 17:37:17.741790 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.063769 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-2wp87" event={"ID":"4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b","Type":"ContainerDied","Data":"02f2f7d672bb3907f1f0256d0911f845928c6111248eda8e4e8249be1389dbfc"} Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.063812 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02f2f7d672bb3907f1f0256d0911f845928c6111248eda8e4e8249be1389dbfc" Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.063838 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-2wp87" Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.247089 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.247370 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-log" containerID="cri-o://c7610e6976e797caebc6c465bac38a12718b6f31761f4266863b96ad4f6e1b46" gracePeriod=30 Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.247477 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-api" containerID="cri-o://1cfc1cf52ac40ee3d8b792343ac268277c29047046b61adb1a664a8eaa1dbcbf" gracePeriod=30 Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.254288 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.227:8774/\": EOF" Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.254300 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.227:8774/\": EOF" Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.279769 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.280037 4753 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/nova-metadata-0" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-log" containerID="cri-o://12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b" gracePeriod=30 Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.280119 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-metadata" containerID="cri-o://d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6" gracePeriod=30 Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.295705 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:37:18 crc kubenswrapper[4753]: I1205 17:37:18.295951 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="4af889e3-58fc-4d21-8c35-28e6166eba2e" containerName="nova-scheduler-scheduler" containerID="cri-o://1d44cbc3c67c8a7544913e19dbca97fea44114f6c0933be8d9502d19824f99c3" gracePeriod=30 Dec 05 17:37:19 crc kubenswrapper[4753]: I1205 17:37:19.079504 4753 generic.go:334] "Generic (PLEG): container finished" podID="867981ea-803a-423e-ad91-115d899c9c6a" containerID="12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b" exitCode=143 Dec 05 17:37:19 crc kubenswrapper[4753]: I1205 17:37:19.079614 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"867981ea-803a-423e-ad91-115d899c9c6a","Type":"ContainerDied","Data":"12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b"} Dec 05 17:37:19 crc kubenswrapper[4753]: I1205 17:37:19.083051 4753 generic.go:334] "Generic (PLEG): container finished" podID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerID="c7610e6976e797caebc6c465bac38a12718b6f31761f4266863b96ad4f6e1b46" exitCode=143 Dec 05 17:37:19 crc kubenswrapper[4753]: I1205 17:37:19.083109 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3163f6c9-2070-4787-9b8b-d89d2d29a27c","Type":"ContainerDied","Data":"c7610e6976e797caebc6c465bac38a12718b6f31761f4266863b96ad4f6e1b46"} Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.122493 4753 generic.go:334] "Generic (PLEG): container finished" podID="4af889e3-58fc-4d21-8c35-28e6166eba2e" containerID="1d44cbc3c67c8a7544913e19dbca97fea44114f6c0933be8d9502d19824f99c3" exitCode=0 Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.122804 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4af889e3-58fc-4d21-8c35-28e6166eba2e","Type":"ContainerDied","Data":"1d44cbc3c67c8a7544913e19dbca97fea44114f6c0933be8d9502d19824f99c3"} Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.347334 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.407562 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-config-data\") pod \"4af889e3-58fc-4d21-8c35-28e6166eba2e\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.407754 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdn44\" (UniqueName: \"kubernetes.io/projected/4af889e3-58fc-4d21-8c35-28e6166eba2e-kube-api-access-zdn44\") pod \"4af889e3-58fc-4d21-8c35-28e6166eba2e\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.407830 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-combined-ca-bundle\") pod \"4af889e3-58fc-4d21-8c35-28e6166eba2e\" (UID: \"4af889e3-58fc-4d21-8c35-28e6166eba2e\") " Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.415720 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4af889e3-58fc-4d21-8c35-28e6166eba2e-kube-api-access-zdn44" (OuterVolumeSpecName: "kube-api-access-zdn44") pod "4af889e3-58fc-4d21-8c35-28e6166eba2e" (UID: "4af889e3-58fc-4d21-8c35-28e6166eba2e"). InnerVolumeSpecName "kube-api-access-zdn44". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.446676 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-config-data" (OuterVolumeSpecName: "config-data") pod "4af889e3-58fc-4d21-8c35-28e6166eba2e" (UID: "4af889e3-58fc-4d21-8c35-28e6166eba2e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.458053 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4af889e3-58fc-4d21-8c35-28e6166eba2e" (UID: "4af889e3-58fc-4d21-8c35-28e6166eba2e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.510242 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.510272 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdn44\" (UniqueName: \"kubernetes.io/projected/4af889e3-58fc-4d21-8c35-28e6166eba2e-kube-api-access-zdn44\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:20 crc kubenswrapper[4753]: I1205 17:37:20.510283 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4af889e3-58fc-4d21-8c35-28e6166eba2e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.135736 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"4af889e3-58fc-4d21-8c35-28e6166eba2e","Type":"ContainerDied","Data":"36cc96fb2b5a6e429e82cf745153d04d0ece1123165f89c4cc3d0ca25bb5e13c"} Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.135814 4753 scope.go:117] "RemoveContainer" containerID="1d44cbc3c67c8a7544913e19dbca97fea44114f6c0933be8d9502d19824f99c3" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.135983 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.172297 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.181768 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.207206 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:37:21 crc kubenswrapper[4753]: E1205 17:37:21.207691 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab2065af-a1a7-4876-a066-37f7f01a8435" containerName="init" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.207708 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab2065af-a1a7-4876-a066-37f7f01a8435" containerName="init" Dec 05 17:37:21 crc kubenswrapper[4753]: E1205 17:37:21.207726 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4af889e3-58fc-4d21-8c35-28e6166eba2e" containerName="nova-scheduler-scheduler" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.207734 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4af889e3-58fc-4d21-8c35-28e6166eba2e" containerName="nova-scheduler-scheduler" Dec 05 17:37:21 crc kubenswrapper[4753]: E1205 17:37:21.207773 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab2065af-a1a7-4876-a066-37f7f01a8435" containerName="dnsmasq-dns" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.207780 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab2065af-a1a7-4876-a066-37f7f01a8435" containerName="dnsmasq-dns" Dec 05 17:37:21 crc kubenswrapper[4753]: E1205 17:37:21.207796 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b" containerName="nova-manage" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.207802 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b" containerName="nova-manage" Dec 05 
17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.208029 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b" containerName="nova-manage" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.208054 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab2065af-a1a7-4876-a066-37f7f01a8435" containerName="dnsmasq-dns" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.208064 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="4af889e3-58fc-4d21-8c35-28e6166eba2e" containerName="nova-scheduler-scheduler" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.208873 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.212265 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.217248 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.230680 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f6bb960-f1d1-413e-bd11-aa0d1251135f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1f6bb960-f1d1-413e-bd11-aa0d1251135f\") " pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.230769 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f6bb960-f1d1-413e-bd11-aa0d1251135f-config-data\") pod \"nova-scheduler-0\" (UID: \"1f6bb960-f1d1-413e-bd11-aa0d1251135f\") " pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.230839 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlrls\" (UniqueName: \"kubernetes.io/projected/1f6bb960-f1d1-413e-bd11-aa0d1251135f-kube-api-access-tlrls\") pod \"nova-scheduler-0\" (UID: \"1f6bb960-f1d1-413e-bd11-aa0d1251135f\") " pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.333930 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f6bb960-f1d1-413e-bd11-aa0d1251135f-config-data\") pod \"nova-scheduler-0\" (UID: \"1f6bb960-f1d1-413e-bd11-aa0d1251135f\") " pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.334757 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlrls\" (UniqueName: \"kubernetes.io/projected/1f6bb960-f1d1-413e-bd11-aa0d1251135f-kube-api-access-tlrls\") pod \"nova-scheduler-0\" (UID: \"1f6bb960-f1d1-413e-bd11-aa0d1251135f\") " pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.334985 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f6bb960-f1d1-413e-bd11-aa0d1251135f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1f6bb960-f1d1-413e-bd11-aa0d1251135f\") " pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.338212 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/1f6bb960-f1d1-413e-bd11-aa0d1251135f-config-data\") pod \"nova-scheduler-0\" (UID: \"1f6bb960-f1d1-413e-bd11-aa0d1251135f\") " pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.351079 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f6bb960-f1d1-413e-bd11-aa0d1251135f-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"1f6bb960-f1d1-413e-bd11-aa0d1251135f\") " pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.352269 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlrls\" (UniqueName: \"kubernetes.io/projected/1f6bb960-f1d1-413e-bd11-aa0d1251135f-kube-api-access-tlrls\") pod \"nova-scheduler-0\" (UID: \"1f6bb960-f1d1-413e-bd11-aa0d1251135f\") " pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.431212 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.220:8775/\": read tcp 10.217.0.2:51852->10.217.0.220:8775: read: connection reset by peer" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.431273 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.220:8775/\": read tcp 10.217.0.2:51858->10.217.0.220:8775: read: connection reset by peer" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.538910 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.737783 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4af889e3-58fc-4d21-8c35-28e6166eba2e" path="/var/lib/kubelet/pods/4af889e3-58fc-4d21-8c35-28e6166eba2e/volumes" Dec 05 17:37:21 crc kubenswrapper[4753]: I1205 17:37:21.928302 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.048685 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4mmzb\" (UniqueName: \"kubernetes.io/projected/867981ea-803a-423e-ad91-115d899c9c6a-kube-api-access-4mmzb\") pod \"867981ea-803a-423e-ad91-115d899c9c6a\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.048810 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-nova-metadata-tls-certs\") pod \"867981ea-803a-423e-ad91-115d899c9c6a\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.048867 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867981ea-803a-423e-ad91-115d899c9c6a-logs\") pod \"867981ea-803a-423e-ad91-115d899c9c6a\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.048899 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-config-data\") pod \"867981ea-803a-423e-ad91-115d899c9c6a\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.049072 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-combined-ca-bundle\") pod \"867981ea-803a-423e-ad91-115d899c9c6a\" (UID: \"867981ea-803a-423e-ad91-115d899c9c6a\") " Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.051514 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/867981ea-803a-423e-ad91-115d899c9c6a-logs" (OuterVolumeSpecName: "logs") pod "867981ea-803a-423e-ad91-115d899c9c6a" (UID: "867981ea-803a-423e-ad91-115d899c9c6a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.058128 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/867981ea-803a-423e-ad91-115d899c9c6a-kube-api-access-4mmzb" (OuterVolumeSpecName: "kube-api-access-4mmzb") pod "867981ea-803a-423e-ad91-115d899c9c6a" (UID: "867981ea-803a-423e-ad91-115d899c9c6a"). InnerVolumeSpecName "kube-api-access-4mmzb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.087117 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-config-data" (OuterVolumeSpecName: "config-data") pod "867981ea-803a-423e-ad91-115d899c9c6a" (UID: "867981ea-803a-423e-ad91-115d899c9c6a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.088821 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "867981ea-803a-423e-ad91-115d899c9c6a" (UID: "867981ea-803a-423e-ad91-115d899c9c6a"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.090113 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.130314 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "867981ea-803a-423e-ad91-115d899c9c6a" (UID: "867981ea-803a-423e-ad91-115d899c9c6a"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.152983 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/867981ea-803a-423e-ad91-115d899c9c6a-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.153021 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.153031 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.153044 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4mmzb\" (UniqueName: \"kubernetes.io/projected/867981ea-803a-423e-ad91-115d899c9c6a-kube-api-access-4mmzb\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.153053 4753 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/867981ea-803a-423e-ad91-115d899c9c6a-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.160722 4753 generic.go:334] "Generic (PLEG): container finished" podID="867981ea-803a-423e-ad91-115d899c9c6a" containerID="d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6" exitCode=0 Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.160794 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"867981ea-803a-423e-ad91-115d899c9c6a","Type":"ContainerDied","Data":"d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6"} Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.160825 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"867981ea-803a-423e-ad91-115d899c9c6a","Type":"ContainerDied","Data":"0afa11853eafef3f62bdd22bbab36476592fe409e7065a9401c396f1f8cbc6da"} Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.160844 4753 scope.go:117] "RemoveContainer" containerID="d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.160947 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.167637 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1f6bb960-f1d1-413e-bd11-aa0d1251135f","Type":"ContainerStarted","Data":"bd50d8e197ceb0f1e5c02ff0faf1ee2a24ee2c957276e53f31a4645d064c43ae"} Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.185644 4753 scope.go:117] "RemoveContainer" containerID="12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.219270 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.222307 4753 scope.go:117] "RemoveContainer" containerID="d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6" Dec 05 17:37:22 crc kubenswrapper[4753]: E1205 17:37:22.222775 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6\": container with ID starting with d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6 not found: ID does not exist" containerID="d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.222802 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6"} err="failed to get container status \"d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6\": rpc error: code = NotFound desc = could not find container \"d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6\": container with ID starting with d4c4b05743bc899ab80f5a44d444e162dd322a02fff7afe9f5fe461afc7187a6 not found: ID does not exist" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.222827 4753 scope.go:117] "RemoveContainer" containerID="12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b" Dec 05 17:37:22 crc kubenswrapper[4753]: E1205 17:37:22.223515 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b\": container with ID starting with 12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b not found: ID does not exist" containerID="12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.223560 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b"} err="failed to get container status \"12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b\": rpc error: code = NotFound desc = could not find container \"12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b\": container with ID starting with 12a707423e20f5cecc5adbc25e31f6a7ce22c86fc1d25fd7a0194cc838461c3b not found: ID does not exist" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.226081 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.249441 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:37:22 crc kubenswrapper[4753]: E1205 17:37:22.250241 4753 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-metadata" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.250259 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-metadata" Dec 05 17:37:22 crc kubenswrapper[4753]: E1205 17:37:22.250284 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-log" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.250291 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-log" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.250491 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-log" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.250511 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="867981ea-803a-423e-ad91-115d899c9c6a" containerName="nova-metadata-metadata" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.251811 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.253958 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.254353 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.268807 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.357288 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.357373 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-logs\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.357593 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-config-data\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.357659 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.357889 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqbwv\" (UniqueName: 
\"kubernetes.io/projected/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-kube-api-access-dqbwv\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.460067 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-config-data\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.460130 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.460218 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqbwv\" (UniqueName: \"kubernetes.io/projected/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-kube-api-access-dqbwv\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.460273 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.460316 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-logs\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.460920 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-logs\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.465118 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.465510 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.465777 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-config-data\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.474776 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-dqbwv\" (UniqueName: \"kubernetes.io/projected/f56a1bea-c258-4ed6-a43c-2d006aaa4a23-kube-api-access-dqbwv\") pod \"nova-metadata-0\" (UID: \"f56a1bea-c258-4ed6-a43c-2d006aaa4a23\") " pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.570302 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:37:22 crc kubenswrapper[4753]: I1205 17:37:22.722354 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:37:22 crc kubenswrapper[4753]: E1205 17:37:22.722866 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:37:23 crc kubenswrapper[4753]: I1205 17:37:23.107879 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:37:23 crc kubenswrapper[4753]: I1205 17:37:23.201585 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"1f6bb960-f1d1-413e-bd11-aa0d1251135f","Type":"ContainerStarted","Data":"65fb2a5ee8af093d94ae6b125aae231a4fb5c47fc748018ad2123c63c85bf7fb"} Dec 05 17:37:23 crc kubenswrapper[4753]: I1205 17:37:23.204107 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f56a1bea-c258-4ed6-a43c-2d006aaa4a23","Type":"ContainerStarted","Data":"85ad0430c0a04c171b15e80f8e395a2ef46bace12546e91cf96dde58d97e8533"} Dec 05 17:37:23 crc kubenswrapper[4753]: I1205 17:37:23.230070 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.230053374 podStartE2EDuration="2.230053374s" podCreationTimestamp="2025-12-05 17:37:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:37:23.223700264 +0000 UTC m=+1981.726807290" watchObservedRunningTime="2025-12-05 17:37:23.230053374 +0000 UTC m=+1981.733160380" Dec 05 17:37:23 crc kubenswrapper[4753]: I1205 17:37:23.734780 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="867981ea-803a-423e-ad91-115d899c9c6a" path="/var/lib/kubelet/pods/867981ea-803a-423e-ad91-115d899c9c6a/volumes" Dec 05 17:37:24 crc kubenswrapper[4753]: I1205 17:37:24.214201 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f56a1bea-c258-4ed6-a43c-2d006aaa4a23","Type":"ContainerStarted","Data":"e682ddf1c0d62ccf2d0542e992f09c1894b6a892a958e36c5d80027f93b72448"} Dec 05 17:37:24 crc kubenswrapper[4753]: I1205 17:37:24.214246 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"f56a1bea-c258-4ed6-a43c-2d006aaa4a23","Type":"ContainerStarted","Data":"e084ad7581c35397ca6e9c47b0e983fea497e73b668601468c0a1e9bf00510a2"} Dec 05 17:37:24 crc kubenswrapper[4753]: I1205 17:37:24.241471 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.241453863 podStartE2EDuration="2.241453863s" podCreationTimestamp="2025-12-05 17:37:22 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:37:24.237614894 +0000 UTC m=+1982.740721900" watchObservedRunningTime="2025-12-05 17:37:24.241453863 +0000 UTC m=+1982.744560869" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.228815 4753 generic.go:334] "Generic (PLEG): container finished" podID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerID="1cfc1cf52ac40ee3d8b792343ac268277c29047046b61adb1a664a8eaa1dbcbf" exitCode=0 Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.228915 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3163f6c9-2070-4787-9b8b-d89d2d29a27c","Type":"ContainerDied","Data":"1cfc1cf52ac40ee3d8b792343ac268277c29047046b61adb1a664a8eaa1dbcbf"} Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.229401 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3163f6c9-2070-4787-9b8b-d89d2d29a27c","Type":"ContainerDied","Data":"1cead7f63f5b8a25ec8eb804903af3e9af70aa5a9c8c7d738213371e397fc118"} Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.229428 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1cead7f63f5b8a25ec8eb804903af3e9af70aa5a9c8c7d738213371e397fc118" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.303355 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.432852 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-public-tls-certs\") pod \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.432899 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-combined-ca-bundle\") pod \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.432933 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7fsp\" (UniqueName: \"kubernetes.io/projected/3163f6c9-2070-4787-9b8b-d89d2d29a27c-kube-api-access-t7fsp\") pod \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.433011 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-config-data\") pod \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.433902 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3163f6c9-2070-4787-9b8b-d89d2d29a27c-logs\") pod \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.434117 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-internal-tls-certs\") pod 
\"3163f6c9-2070-4787-9b8b-d89d2d29a27c\" (UID: \"3163f6c9-2070-4787-9b8b-d89d2d29a27c\") " Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.434532 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3163f6c9-2070-4787-9b8b-d89d2d29a27c-logs" (OuterVolumeSpecName: "logs") pod "3163f6c9-2070-4787-9b8b-d89d2d29a27c" (UID: "3163f6c9-2070-4787-9b8b-d89d2d29a27c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.435696 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3163f6c9-2070-4787-9b8b-d89d2d29a27c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.446860 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3163f6c9-2070-4787-9b8b-d89d2d29a27c-kube-api-access-t7fsp" (OuterVolumeSpecName: "kube-api-access-t7fsp") pod "3163f6c9-2070-4787-9b8b-d89d2d29a27c" (UID: "3163f6c9-2070-4787-9b8b-d89d2d29a27c"). InnerVolumeSpecName "kube-api-access-t7fsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.465361 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-config-data" (OuterVolumeSpecName: "config-data") pod "3163f6c9-2070-4787-9b8b-d89d2d29a27c" (UID: "3163f6c9-2070-4787-9b8b-d89d2d29a27c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.473516 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3163f6c9-2070-4787-9b8b-d89d2d29a27c" (UID: "3163f6c9-2070-4787-9b8b-d89d2d29a27c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.490400 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "3163f6c9-2070-4787-9b8b-d89d2d29a27c" (UID: "3163f6c9-2070-4787-9b8b-d89d2d29a27c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.496390 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "3163f6c9-2070-4787-9b8b-d89d2d29a27c" (UID: "3163f6c9-2070-4787-9b8b-d89d2d29a27c"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.537554 4753 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.537600 4753 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.537615 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.537627 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7fsp\" (UniqueName: \"kubernetes.io/projected/3163f6c9-2070-4787-9b8b-d89d2d29a27c-kube-api-access-t7fsp\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:25 crc kubenswrapper[4753]: I1205 17:37:25.537640 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3163f6c9-2070-4787-9b8b-d89d2d29a27c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.238457 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.274675 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.298485 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.322828 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:26 crc kubenswrapper[4753]: E1205 17:37:26.323487 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-log" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.323510 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-log" Dec 05 17:37:26 crc kubenswrapper[4753]: E1205 17:37:26.323532 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-api" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.323540 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-api" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.323738 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-log" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.323758 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" containerName="nova-api-api" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.328643 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.333515 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.333559 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.336419 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.390265 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.456521 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.456583 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-config-data\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.456609 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.456675 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-logs\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.456712 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-public-tls-certs\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.456782 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc648\" (UniqueName: \"kubernetes.io/projected/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-kube-api-access-xc648\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.539632 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.558938 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-public-tls-certs\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.559072 4753 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-xc648\" (UniqueName: \"kubernetes.io/projected/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-kube-api-access-xc648\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.559166 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.559199 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-config-data\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.559247 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.559315 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-logs\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.559872 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-logs\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.563807 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-config-data\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.563844 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-internal-tls-certs\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.563964 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.564412 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-public-tls-certs\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.580310 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xc648\" (UniqueName: 
\"kubernetes.io/projected/3aaf728a-8d40-4b45-9f79-a5bb36ee9a57-kube-api-access-xc648\") pod \"nova-api-0\" (UID: \"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57\") " pod="openstack/nova-api-0" Dec 05 17:37:26 crc kubenswrapper[4753]: I1205 17:37:26.656749 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:37:27 crc kubenswrapper[4753]: W1205 17:37:27.164850 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3aaf728a_8d40_4b45_9f79_a5bb36ee9a57.slice/crio-35ecac3c1016d82f761c4efaa731dce62ac8c326aac058ba1fdc007da83312ca WatchSource:0}: Error finding container 35ecac3c1016d82f761c4efaa731dce62ac8c326aac058ba1fdc007da83312ca: Status 404 returned error can't find the container with id 35ecac3c1016d82f761c4efaa731dce62ac8c326aac058ba1fdc007da83312ca Dec 05 17:37:27 crc kubenswrapper[4753]: I1205 17:37:27.177677 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:37:27 crc kubenswrapper[4753]: I1205 17:37:27.298176 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57","Type":"ContainerStarted","Data":"35ecac3c1016d82f761c4efaa731dce62ac8c326aac058ba1fdc007da83312ca"} Dec 05 17:37:27 crc kubenswrapper[4753]: I1205 17:37:27.572036 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 17:37:27 crc kubenswrapper[4753]: I1205 17:37:27.572350 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 17:37:27 crc kubenswrapper[4753]: I1205 17:37:27.735107 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3163f6c9-2070-4787-9b8b-d89d2d29a27c" path="/var/lib/kubelet/pods/3163f6c9-2070-4787-9b8b-d89d2d29a27c/volumes" Dec 05 17:37:28 crc kubenswrapper[4753]: I1205 17:37:28.312187 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57","Type":"ContainerStarted","Data":"5c23f23c039c8291db9f034bd984448e7c428ec159d5507ffc00e04d8563e5ee"} Dec 05 17:37:28 crc kubenswrapper[4753]: I1205 17:37:28.312239 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3aaf728a-8d40-4b45-9f79-a5bb36ee9a57","Type":"ContainerStarted","Data":"2fdd891a4647c3c367004243d998977cf88dea7c7c8c632d3acb982d25878747"} Dec 05 17:37:28 crc kubenswrapper[4753]: I1205 17:37:28.330300 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.3302796089999998 podStartE2EDuration="2.330279609s" podCreationTimestamp="2025-12-05 17:37:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:37:28.329375003 +0000 UTC m=+1986.832482009" watchObservedRunningTime="2025-12-05 17:37:28.330279609 +0000 UTC m=+1986.833386615" Dec 05 17:37:31 crc kubenswrapper[4753]: I1205 17:37:31.539520 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 17:37:31 crc kubenswrapper[4753]: I1205 17:37:31.597072 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 17:37:32 crc kubenswrapper[4753]: I1205 17:37:32.400779 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/nova-scheduler-0" Dec 05 17:37:32 crc kubenswrapper[4753]: I1205 17:37:32.571735 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 17:37:32 crc kubenswrapper[4753]: I1205 17:37:32.571784 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 17:37:33 crc kubenswrapper[4753]: I1205 17:37:33.586308 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f56a1bea-c258-4ed6-a43c-2d006aaa4a23" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.231:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 17:37:33 crc kubenswrapper[4753]: I1205 17:37:33.586308 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="f56a1bea-c258-4ed6-a43c-2d006aaa4a23" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.231:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 17:37:35 crc kubenswrapper[4753]: I1205 17:37:35.721881 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd" Dec 05 17:37:36 crc kubenswrapper[4753]: I1205 17:37:36.402216 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"991d8322b4cfd6073d1f5ac6852a07230df5501090d0dc07c17c7fbcde13dc9e"} Dec 05 17:37:36 crc kubenswrapper[4753]: I1205 17:37:36.660822 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:37:36 crc kubenswrapper[4753]: I1205 17:37:36.660907 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:37:37 crc kubenswrapper[4753]: I1205 17:37:37.674340 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3aaf728a-8d40-4b45-9f79-a5bb36ee9a57" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.232:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 17:37:37 crc kubenswrapper[4753]: I1205 17:37:37.674387 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3aaf728a-8d40-4b45-9f79-a5bb36ee9a57" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.232:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 17:37:40 crc kubenswrapper[4753]: I1205 17:37:40.363786 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 17:37:42 crc kubenswrapper[4753]: I1205 17:37:42.578206 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 17:37:42 crc kubenswrapper[4753]: I1205 17:37:42.581613 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 17:37:42 crc kubenswrapper[4753]: I1205 17:37:42.583531 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 17:37:43 crc kubenswrapper[4753]: I1205 17:37:43.488256 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 
17:37:46 crc kubenswrapper[4753]: I1205 17:37:46.664224 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 17:37:46 crc kubenswrapper[4753]: I1205 17:37:46.665449 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 17:37:46 crc kubenswrapper[4753]: I1205 17:37:46.680467 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 17:37:46 crc kubenswrapper[4753]: I1205 17:37:46.739662 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 17:37:47 crc kubenswrapper[4753]: I1205 17:37:47.534266 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 17:37:47 crc kubenswrapper[4753]: I1205 17:37:47.545650 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.850201 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-9vsjw"] Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.861093 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-9vsjw"] Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.940323 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-db-sync-9mzx8"] Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.941710 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.947088 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.953833 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-9mzx8"] Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.996947 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xrdb\" (UniqueName: \"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-kube-api-access-5xrdb\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.997038 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-certs\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.997072 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-scripts\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:57 crc kubenswrapper[4753]: I1205 17:37:57.997139 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-config-data\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:57 crc kubenswrapper[4753]: 
I1205 17:37:57.997253 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-combined-ca-bundle\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.098877 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-combined-ca-bundle\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.098992 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xrdb\" (UniqueName: \"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-kube-api-access-5xrdb\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.099060 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-certs\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.099083 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-scripts\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.099143 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-config-data\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.106811 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-scripts\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.107487 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-config-data\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.107726 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-combined-ca-bundle\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.113389 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: 
\"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-certs\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.119218 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xrdb\" (UniqueName: \"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-kube-api-access-5xrdb\") pod \"cloudkitty-db-sync-9mzx8\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.260551 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:37:58 crc kubenswrapper[4753]: W1205 17:37:58.777305 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd52c3870_a8b0_43f1_ac03_2e7c0015a5f7.slice/crio-3f4792723a1568baa512ca4bba6c954156e961aca55d171bc184ae10fde10017 WatchSource:0}: Error finding container 3f4792723a1568baa512ca4bba6c954156e961aca55d171bc184ae10fde10017: Status 404 returned error can't find the container with id 3f4792723a1568baa512ca4bba6c954156e961aca55d171bc184ae10fde10017 Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.779772 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:37:58 crc kubenswrapper[4753]: I1205 17:37:58.784197 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-db-sync-9mzx8"] Dec 05 17:37:59 crc kubenswrapper[4753]: I1205 17:37:59.453995 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:37:59 crc kubenswrapper[4753]: I1205 17:37:59.697889 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-9mzx8" event={"ID":"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7","Type":"ContainerStarted","Data":"3f4792723a1568baa512ca4bba6c954156e961aca55d171bc184ae10fde10017"} Dec 05 17:37:59 crc kubenswrapper[4753]: I1205 17:37:59.761704 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5711686-6b64-450f-b2b4-6583dab08275" path="/var/lib/kubelet/pods/f5711686-6b64-450f-b2b4-6583dab08275/volumes" Dec 05 17:38:00 crc kubenswrapper[4753]: I1205 17:38:00.157883 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:38:00 crc kubenswrapper[4753]: I1205 17:38:00.158532 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="ceilometer-central-agent" containerID="cri-o://0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082" gracePeriod=30 Dec 05 17:38:00 crc kubenswrapper[4753]: I1205 17:38:00.159049 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="proxy-httpd" containerID="cri-o://baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9" gracePeriod=30 Dec 05 17:38:00 crc kubenswrapper[4753]: I1205 17:38:00.159117 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="sg-core" containerID="cri-o://24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d" gracePeriod=30 Dec 05 17:38:00 crc 
kubenswrapper[4753]: I1205 17:38:00.159187 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="ceilometer-notification-agent" containerID="cri-o://585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25" gracePeriod=30 Dec 05 17:38:00 crc kubenswrapper[4753]: I1205 17:38:00.476799 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:38:00 crc kubenswrapper[4753]: I1205 17:38:00.717773 4753 generic.go:334] "Generic (PLEG): container finished" podID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerID="baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9" exitCode=0 Dec 05 17:38:00 crc kubenswrapper[4753]: I1205 17:38:00.718043 4753 generic.go:334] "Generic (PLEG): container finished" podID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerID="24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d" exitCode=2 Dec 05 17:38:00 crc kubenswrapper[4753]: I1205 17:38:00.717855 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerDied","Data":"baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9"} Dec 05 17:38:00 crc kubenswrapper[4753]: I1205 17:38:00.718084 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerDied","Data":"24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d"} Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.632403 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.747902 4753 generic.go:334] "Generic (PLEG): container finished" podID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerID="585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25" exitCode=0 Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.747934 4753 generic.go:334] "Generic (PLEG): container finished" podID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerID="0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082" exitCode=0 Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.752132 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.755724 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerDied","Data":"585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25"} Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.755760 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerDied","Data":"0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082"} Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.755772 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ef421b3a-61a8-4d2b-a898-caff08ee8231","Type":"ContainerDied","Data":"78e66313143301feec1d810f60b8160f9daea6634f861a9a98b6c2a0e3ba53ed"} Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.755789 4753 scope.go:117] "RemoveContainer" containerID="baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.783908 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-ceilometer-tls-certs\") pod \"ef421b3a-61a8-4d2b-a898-caff08ee8231\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.784029 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-56fc6\" (UniqueName: \"kubernetes.io/projected/ef421b3a-61a8-4d2b-a898-caff08ee8231-kube-api-access-56fc6\") pod \"ef421b3a-61a8-4d2b-a898-caff08ee8231\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.784070 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-run-httpd\") pod \"ef421b3a-61a8-4d2b-a898-caff08ee8231\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.784142 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-config-data\") pod \"ef421b3a-61a8-4d2b-a898-caff08ee8231\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.784677 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-log-httpd\") pod \"ef421b3a-61a8-4d2b-a898-caff08ee8231\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.784745 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-scripts\") pod \"ef421b3a-61a8-4d2b-a898-caff08ee8231\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.784790 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-combined-ca-bundle\") pod \"ef421b3a-61a8-4d2b-a898-caff08ee8231\" (UID: 
\"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.784828 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-sg-core-conf-yaml\") pod \"ef421b3a-61a8-4d2b-a898-caff08ee8231\" (UID: \"ef421b3a-61a8-4d2b-a898-caff08ee8231\") " Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.784885 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ef421b3a-61a8-4d2b-a898-caff08ee8231" (UID: "ef421b3a-61a8-4d2b-a898-caff08ee8231"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.785285 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ef421b3a-61a8-4d2b-a898-caff08ee8231" (UID: "ef421b3a-61a8-4d2b-a898-caff08ee8231"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.785368 4753 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.785385 4753 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ef421b3a-61a8-4d2b-a898-caff08ee8231-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.793830 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef421b3a-61a8-4d2b-a898-caff08ee8231-kube-api-access-56fc6" (OuterVolumeSpecName: "kube-api-access-56fc6") pod "ef421b3a-61a8-4d2b-a898-caff08ee8231" (UID: "ef421b3a-61a8-4d2b-a898-caff08ee8231"). InnerVolumeSpecName "kube-api-access-56fc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.797660 4753 scope.go:117] "RemoveContainer" containerID="24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.799370 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-scripts" (OuterVolumeSpecName: "scripts") pod "ef421b3a-61a8-4d2b-a898-caff08ee8231" (UID: "ef421b3a-61a8-4d2b-a898-caff08ee8231"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.818013 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ef421b3a-61a8-4d2b-a898-caff08ee8231" (UID: "ef421b3a-61a8-4d2b-a898-caff08ee8231"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.889543 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.891574 4753 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.891600 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-56fc6\" (UniqueName: \"kubernetes.io/projected/ef421b3a-61a8-4d2b-a898-caff08ee8231-kube-api-access-56fc6\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.898582 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ef421b3a-61a8-4d2b-a898-caff08ee8231" (UID: "ef421b3a-61a8-4d2b-a898-caff08ee8231"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.937473 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "ef421b3a-61a8-4d2b-a898-caff08ee8231" (UID: "ef421b3a-61a8-4d2b-a898-caff08ee8231"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.958716 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-config-data" (OuterVolumeSpecName: "config-data") pod "ef421b3a-61a8-4d2b-a898-caff08ee8231" (UID: "ef421b3a-61a8-4d2b-a898-caff08ee8231"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.967366 4753 scope.go:117] "RemoveContainer" containerID="585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.994183 4753 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.994221 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:01 crc kubenswrapper[4753]: I1205 17:38:01.994244 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ef421b3a-61a8-4d2b-a898-caff08ee8231-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.011271 4753 scope.go:117] "RemoveContainer" containerID="0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.044456 4753 scope.go:117] "RemoveContainer" containerID="baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9" Dec 05 17:38:02 crc kubenswrapper[4753]: E1205 17:38:02.046009 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9\": container with ID starting with baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9 not found: ID does not exist" containerID="baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.046057 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9"} err="failed to get container status \"baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9\": rpc error: code = NotFound desc = could not find container \"baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9\": container with ID starting with baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9 not found: ID does not exist" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.046088 4753 scope.go:117] "RemoveContainer" containerID="24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d" Dec 05 17:38:02 crc kubenswrapper[4753]: E1205 17:38:02.046495 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d\": container with ID starting with 24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d not found: ID does not exist" containerID="24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.046544 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d"} err="failed to get container status \"24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d\": rpc error: code = NotFound desc = could not find container 
\"24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d\": container with ID starting with 24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d not found: ID does not exist" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.046568 4753 scope.go:117] "RemoveContainer" containerID="585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25" Dec 05 17:38:02 crc kubenswrapper[4753]: E1205 17:38:02.046893 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25\": container with ID starting with 585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25 not found: ID does not exist" containerID="585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.046916 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25"} err="failed to get container status \"585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25\": rpc error: code = NotFound desc = could not find container \"585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25\": container with ID starting with 585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25 not found: ID does not exist" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.046933 4753 scope.go:117] "RemoveContainer" containerID="0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082" Dec 05 17:38:02 crc kubenswrapper[4753]: E1205 17:38:02.047277 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082\": container with ID starting with 0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082 not found: ID does not exist" containerID="0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.047305 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082"} err="failed to get container status \"0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082\": rpc error: code = NotFound desc = could not find container \"0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082\": container with ID starting with 0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082 not found: ID does not exist" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.047323 4753 scope.go:117] "RemoveContainer" containerID="baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.047775 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9"} err="failed to get container status \"baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9\": rpc error: code = NotFound desc = could not find container \"baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9\": container with ID starting with baf097a164a82f254af442ebcd5509650b111625cf07894894ad06f8a98a17d9 not found: ID does not exist" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.047831 4753 scope.go:117] "RemoveContainer" 
containerID="24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.048165 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d"} err="failed to get container status \"24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d\": rpc error: code = NotFound desc = could not find container \"24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d\": container with ID starting with 24b507fc9cf7f1035986a1b120d9a3554238340c74c54783118d9180f2548a9d not found: ID does not exist" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.048189 4753 scope.go:117] "RemoveContainer" containerID="585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.048588 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25"} err="failed to get container status \"585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25\": rpc error: code = NotFound desc = could not find container \"585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25\": container with ID starting with 585991e5822f2e5746af935e2679c1def9a620efdf897d3f8baebcc742703d25 not found: ID does not exist" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.048613 4753 scope.go:117] "RemoveContainer" containerID="0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.050286 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082"} err="failed to get container status \"0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082\": rpc error: code = NotFound desc = could not find container \"0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082\": container with ID starting with 0ebb2942cb3a18e5a6a3c057ac9cbe41ceabad9e6b89abc171423ac25c0cb082 not found: ID does not exist" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.107430 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.126927 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.138812 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:38:02 crc kubenswrapper[4753]: E1205 17:38:02.140157 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="sg-core" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.140178 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="sg-core" Dec 05 17:38:02 crc kubenswrapper[4753]: E1205 17:38:02.140202 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="ceilometer-central-agent" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.140209 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="ceilometer-central-agent" Dec 05 17:38:02 crc kubenswrapper[4753]: E1205 17:38:02.140240 4753 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="ceilometer-notification-agent" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.140246 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="ceilometer-notification-agent" Dec 05 17:38:02 crc kubenswrapper[4753]: E1205 17:38:02.140258 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="proxy-httpd" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.140264 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="proxy-httpd" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.140459 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="proxy-httpd" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.140476 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="sg-core" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.140493 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="ceilometer-notification-agent" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.140503 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" containerName="ceilometer-central-agent" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.142499 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.171443 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.201497 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.201766 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.201953 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.304352 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-log-httpd\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.304628 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.304691 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-run-httpd\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.304748 4753 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-scripts\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.304794 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.304810 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p46gr\" (UniqueName: \"kubernetes.io/projected/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-kube-api-access-p46gr\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.304839 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-config-data\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.304854 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.406851 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-run-httpd\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.406919 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-scripts\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.406971 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.406987 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p46gr\" (UniqueName: \"kubernetes.io/projected/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-kube-api-access-p46gr\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.407019 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-config-data\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " 
pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.407035 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.407070 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-log-httpd\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.407115 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.408281 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-log-httpd\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.409134 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-run-httpd\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.411949 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.413066 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-config-data\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.413385 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.417675 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.417681 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-scripts\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc 
kubenswrapper[4753]: I1205 17:38:02.432798 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p46gr\" (UniqueName: \"kubernetes.io/projected/5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7-kube-api-access-p46gr\") pod \"ceilometer-0\" (UID: \"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7\") " pod="openstack/ceilometer-0" Dec 05 17:38:02 crc kubenswrapper[4753]: I1205 17:38:02.514816 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:38:03 crc kubenswrapper[4753]: I1205 17:38:03.151047 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:38:03 crc kubenswrapper[4753]: W1205 17:38:03.171746 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e8ed50a_c2d1_47b1_92e2_db51cc75a4c7.slice/crio-33d0744e47c08f64c623e576ab2caef4a89d224f3e8046a656a79a3a2744e5a7 WatchSource:0}: Error finding container 33d0744e47c08f64c623e576ab2caef4a89d224f3e8046a656a79a3a2744e5a7: Status 404 returned error can't find the container with id 33d0744e47c08f64c623e576ab2caef4a89d224f3e8046a656a79a3a2744e5a7 Dec 05 17:38:03 crc kubenswrapper[4753]: I1205 17:38:03.734762 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef421b3a-61a8-4d2b-a898-caff08ee8231" path="/var/lib/kubelet/pods/ef421b3a-61a8-4d2b-a898-caff08ee8231/volumes" Dec 05 17:38:03 crc kubenswrapper[4753]: I1205 17:38:03.817347 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7","Type":"ContainerStarted","Data":"33d0744e47c08f64c623e576ab2caef4a89d224f3e8046a656a79a3a2744e5a7"} Dec 05 17:38:04 crc kubenswrapper[4753]: I1205 17:38:04.791006 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerName="rabbitmq" containerID="cri-o://10668d64fe2c712c2486402a9956161db31594987a12e3d235797dcb37d29bf6" gracePeriod=604795 Dec 05 17:38:05 crc kubenswrapper[4753]: I1205 17:38:05.415636 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerName="rabbitmq" containerID="cri-o://3c247339d27c9ec5ba0733cc0a9a7a3d287053286490b1cea64952a8c4b81b32" gracePeriod=604796 Dec 05 17:38:06 crc kubenswrapper[4753]: I1205 17:38:06.091247 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.111:5671: connect: connection refused" Dec 05 17:38:06 crc kubenswrapper[4753]: I1205 17:38:06.534438 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.112:5671: connect: connection refused" Dec 05 17:38:11 crc kubenswrapper[4753]: I1205 17:38:11.960879 4753 generic.go:334] "Generic (PLEG): container finished" podID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerID="10668d64fe2c712c2486402a9956161db31594987a12e3d235797dcb37d29bf6" exitCode=0 Dec 05 17:38:11 crc kubenswrapper[4753]: I1205 17:38:11.961120 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" 
event={"ID":"36088f6e-7c3c-4fee-918e-e1ee91bf6b33","Type":"ContainerDied","Data":"10668d64fe2c712c2486402a9956161db31594987a12e3d235797dcb37d29bf6"} Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.024546 4753 generic.go:334] "Generic (PLEG): container finished" podID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerID="3c247339d27c9ec5ba0733cc0a9a7a3d287053286490b1cea64952a8c4b81b32" exitCode=0 Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.024599 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f5652c22-3bf2-454d-a4cf-fd0378f133b8","Type":"ContainerDied","Data":"3c247339d27c9ec5ba0733cc0a9a7a3d287053286490b1cea64952a8c4b81b32"} Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.752671 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-595979776c-hpkz9"] Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.754853 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.757075 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.772805 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-595979776c-hpkz9"] Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.873371 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-openstack-edpm-ipam\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.873452 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-swift-storage-0\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.873714 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-svc\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.873816 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-nb\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.873868 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-config\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.874089 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-sb\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.874195 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dmxr\" (UniqueName: \"kubernetes.io/projected/75929efc-b524-4475-bcbc-e692e3353815-kube-api-access-2dmxr\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.976507 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-svc\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.976566 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-nb\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.976594 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-config\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.976637 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-sb\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.976665 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dmxr\" (UniqueName: \"kubernetes.io/projected/75929efc-b524-4475-bcbc-e692e3353815-kube-api-access-2dmxr\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.976733 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-openstack-edpm-ipam\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.976775 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-swift-storage-0\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.977759 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-sb\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.977780 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-nb\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.977787 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-config\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.977957 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-swift-storage-0\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.978376 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-svc\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.978456 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-openstack-edpm-ipam\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:13 crc kubenswrapper[4753]: I1205 17:38:13.999904 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dmxr\" (UniqueName: \"kubernetes.io/projected/75929efc-b524-4475-bcbc-e692e3353815-kube-api-access-2dmxr\") pod \"dnsmasq-dns-595979776c-hpkz9\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:14 crc kubenswrapper[4753]: I1205 17:38:14.077509 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.663275 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.668702 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.742651 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f5652c22-3bf2-454d-a4cf-fd0378f133b8-pod-info\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.742725 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-config-data\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.742770 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-erlang-cookie-secret\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.742821 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-tls\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.742892 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-server-conf\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.742922 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f5652c22-3bf2-454d-a4cf-fd0378f133b8-erlang-cookie-secret\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.742952 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-erlang-cookie\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743520 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743553 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-plugins\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743765 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") pod 
\"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743802 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5m27l\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-kube-api-access-5m27l\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743826 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-pod-info\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743881 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-plugins-conf\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743913 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-erlang-cookie\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743934 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-plugins\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743959 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lskb5\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-kube-api-access-lskb5\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.743981 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-server-conf\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.744029 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-confd\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.744051 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-plugins-conf\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.744071 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-config-data\") 
pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.744098 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-tls\") pod \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\" (UID: \"f5652c22-3bf2-454d-a4cf-fd0378f133b8\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.744166 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-confd\") pod \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\" (UID: \"36088f6e-7c3c-4fee-918e-e1ee91bf6b33\") " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.757211 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.757981 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.759761 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.760535 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5652c22-3bf2-454d-a4cf-fd0378f133b8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.765885 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.767242 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.768083 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.772372 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.772484 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.786105 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-pod-info" (OuterVolumeSpecName: "pod-info") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.789301 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-kube-api-access-5m27l" (OuterVolumeSpecName: "kube-api-access-5m27l") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "kube-api-access-5m27l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.790384 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.792133 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-kube-api-access-lskb5" (OuterVolumeSpecName: "kube-api-access-lskb5") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "kube-api-access-lskb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.808346 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/f5652c22-3bf2-454d-a4cf-fd0378f133b8-pod-info" (OuterVolumeSpecName: "pod-info") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "pod-info". 
PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.842752 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c" (OuterVolumeSpecName: "persistence") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.846595 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-config-data" (OuterVolumeSpecName: "config-data") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.849956 4753 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850001 4753 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") on node \"crc\" " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850021 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5m27l\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-kube-api-access-5m27l\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850034 4753 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850044 4753 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850097 4753 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850109 4753 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850120 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lskb5\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-kube-api-access-lskb5\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850130 4753 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850141 4753 
reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850161 4753 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850172 4753 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/f5652c22-3bf2-454d-a4cf-fd0378f133b8-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850180 4753 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850191 4753 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850200 4753 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/f5652c22-3bf2-454d-a4cf-fd0378f133b8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.850209 4753 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.853044 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb" (OuterVolumeSpecName: "persistence") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.865080 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-config-data" (OuterVolumeSpecName: "config-data") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.925372 4753 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... 
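The "attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice..." line above is kubelet's CSI attacher reacting to the kubevirt.io.hostpath-provisioner node plugin: a driver that does not advertise the STAGE_UNSTAGE_VOLUME capability never receives NodeStageVolume/NodeUnstageVolume calls, so kubelet goes straight from UnmountVolume.TearDown to marking the PVC detached. What follows is a minimal sketch of that capability handshake using the upstream CSI Go bindings; the nodeServer type and the supportsStaging flag are illustrative assumptions, not the provisioner's actual code.

    package main

    import (
        "context"
        "fmt"

        "github.com/container-storage-interface/spec/lib/go/csi"
    )

    // nodeServer is a stand-in CSI node plugin; only the capability RPC matters here.
    type nodeServer struct {
        csi.UnimplementedNodeServer
        supportsStaging bool // assumed flag: true would make kubelet call NodeStage/NodeUnstageVolume
    }

    // NodeGetCapabilities is what kubelet consults before deciding whether the
    // MountDevice/UnmountDevice steps have any work to do for this driver.
    func (ns *nodeServer) NodeGetCapabilities(ctx context.Context, req *csi.NodeGetCapabilitiesRequest) (*csi.NodeGetCapabilitiesResponse, error) {
        var caps []*csi.NodeServiceCapability
        if ns.supportsStaging {
            caps = append(caps, &csi.NodeServiceCapability{
                Type: &csi.NodeServiceCapability_Rpc{
                    Rpc: &csi.NodeServiceCapability_RPC{
                        Type: csi.NodeServiceCapability_RPC_STAGE_UNSTAGE_VOLUME,
                    },
                },
            })
        }
        // An empty capability list is what produces the "capability not set.
        // Skipping UnmountDevice..." lines in the kubelet log above.
        return &csi.NodeGetCapabilitiesResponse{Capabilities: caps}, nil
    }

    func main() {
        ns := &nodeServer{supportsStaging: false}
        resp, _ := ns.NodeGetCapabilities(context.Background(), &csi.NodeGetCapabilitiesRequest{})
        fmt.Printf("advertised node capabilities: %d\n", len(resp.Capabilities))
    }

With supportsStaging left false, as above, kubelet logs the skip and proceeds directly to UnmountDevice succeeded, which is exactly the sequence recorded for pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c and pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb below.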
Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.925782 4753 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c") on node "crc" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.932254 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-server-conf" (OuterVolumeSpecName: "server-conf") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.934531 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-server-conf" (OuterVolumeSpecName: "server-conf") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.952649 4753 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.952711 4753 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") on node \"crc\" " Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.952727 4753 reconciler_common.go:293] "Volume detached for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.952738 4753 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.952748 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/f5652c22-3bf2-454d-a4cf-fd0378f133b8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.985396 4753 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.985911 4753 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb") on node "crc" Dec 05 17:38:20 crc kubenswrapper[4753]: I1205 17:38:20.987050 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "36088f6e-7c3c-4fee-918e-e1ee91bf6b33" (UID: "36088f6e-7c3c-4fee-918e-e1ee91bf6b33"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.000671 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "f5652c22-3bf2-454d-a4cf-fd0378f133b8" (UID: "f5652c22-3bf2-454d-a4cf-fd0378f133b8"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.054555 4753 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/f5652c22-3bf2-454d-a4cf-fd0378f133b8-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.054586 4753 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/36088f6e-7c3c-4fee-918e-e1ee91bf6b33-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.054596 4753 reconciler_common.go:293] "Volume detached for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.091198 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.111:5671: i/o timeout" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.118786 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"f5652c22-3bf2-454d-a4cf-fd0378f133b8","Type":"ContainerDied","Data":"ca2bd45670cff0f53216a3ec6be8bf66adf722857e658a5c60d7a89c6e5d203a"} Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.118853 4753 scope.go:117] "RemoveContainer" containerID="3c247339d27c9ec5ba0733cc0a9a7a3d287053286490b1cea64952a8c4b81b32" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.119234 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.123361 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"36088f6e-7c3c-4fee-918e-e1ee91bf6b33","Type":"ContainerDied","Data":"d8c9c26ee97b7977a5c178567c872a01fb48321a9311a3c46f2d8d115c3bf824"} Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.123478 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.179379 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.205430 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.216004 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.228093 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.243640 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:38:21 crc kubenswrapper[4753]: E1205 17:38:21.244106 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerName="setup-container" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.244129 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerName="setup-container" Dec 05 17:38:21 crc kubenswrapper[4753]: E1205 17:38:21.244157 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerName="setup-container" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.244164 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerName="setup-container" Dec 05 17:38:21 crc kubenswrapper[4753]: E1205 17:38:21.244188 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerName="rabbitmq" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.244194 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerName="rabbitmq" Dec 05 17:38:21 crc kubenswrapper[4753]: E1205 17:38:21.244205 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerName="rabbitmq" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.244212 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerName="rabbitmq" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.244416 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" containerName="rabbitmq" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.244441 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerName="rabbitmq" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.245592 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.248290 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.252728 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.252977 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.253103 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.253238 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-js6jz" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.253327 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.253443 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.263055 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.264847 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.269103 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.269312 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.269414 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.269513 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.269681 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.269798 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.269958 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-lqvgb" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.278734 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.296443 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369117 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 
17:38:21.369199 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a1f2600-cf85-45c5-8263-89810b0ba7ce-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369247 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369300 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a1f2600-cf85-45c5-8263-89810b0ba7ce-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369323 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8xfj\" (UniqueName: \"kubernetes.io/projected/7a1f2600-cf85-45c5-8263-89810b0ba7ce-kube-api-access-l8xfj\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369353 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369401 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a1f2600-cf85-45c5-8263-89810b0ba7ce-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369445 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7a1f2600-cf85-45c5-8263-89810b0ba7ce-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369468 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369491 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a1f2600-cf85-45c5-8263-89810b0ba7ce-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.369604 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471226 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471294 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471311 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zmf8\" (UniqueName: \"kubernetes.io/projected/2bbb2b1a-5cf9-497c-9471-13ba1314167b-kube-api-access-4zmf8\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471371 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471524 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2bbb2b1a-5cf9-497c-9471-13ba1314167b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471589 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471678 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471730 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471803 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a1f2600-cf85-45c5-8263-89810b0ba7ce-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471900 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.471950 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2bbb2b1a-5cf9-497c-9471-13ba1314167b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472045 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a1f2600-cf85-45c5-8263-89810b0ba7ce-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472089 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8xfj\" (UniqueName: \"kubernetes.io/projected/7a1f2600-cf85-45c5-8263-89810b0ba7ce-kube-api-access-l8xfj\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472122 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472174 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a1f2600-cf85-45c5-8263-89810b0ba7ce-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472203 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472264 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7a1f2600-cf85-45c5-8263-89810b0ba7ce-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472298 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472324 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2bbb2b1a-5cf9-497c-9471-13ba1314167b-config-data\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472349 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a1f2600-cf85-45c5-8263-89810b0ba7ce-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472422 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472494 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2bbb2b1a-5cf9-497c-9471-13ba1314167b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472518 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2bbb2b1a-5cf9-497c-9471-13ba1314167b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472829 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a1f2600-cf85-45c5-8263-89810b0ba7ce-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.472979 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a1f2600-cf85-45c5-8263-89810b0ba7ce-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.473335 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.474761 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7a1f2600-cf85-45c5-8263-89810b0ba7ce-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.477563 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.477593 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/368f5579209aa91fd91b7b3687bd307acc8a6215d480045ca9a7d2574e8a3831/globalmount\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.478695 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a1f2600-cf85-45c5-8263-89810b0ba7ce-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.478913 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.479753 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a1f2600-cf85-45c5-8263-89810b0ba7ce-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.480838 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a1f2600-cf85-45c5-8263-89810b0ba7ce-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.494943 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8xfj\" (UniqueName: \"kubernetes.io/projected/7a1f2600-cf85-45c5-8263-89810b0ba7ce-kube-api-access-l8xfj\") pod \"rabbitmq-cell1-server-0\" (UID: \"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.534625 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.112:5671: i/o timeout" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.535672 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-580d11aa-59c8-4f4f-99a0-0bfb50f7476c\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"7a1f2600-cf85-45c5-8263-89810b0ba7ce\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.574857 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2bbb2b1a-5cf9-497c-9471-13ba1314167b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575030 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2bbb2b1a-5cf9-497c-9471-13ba1314167b-config-data\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575135 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2bbb2b1a-5cf9-497c-9471-13ba1314167b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575221 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2bbb2b1a-5cf9-497c-9471-13ba1314167b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575279 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575402 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575437 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zmf8\" (UniqueName: \"kubernetes.io/projected/2bbb2b1a-5cf9-497c-9471-13ba1314167b-kube-api-access-4zmf8\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575488 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575548 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2bbb2b1a-5cf9-497c-9471-13ba1314167b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575598 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.575671 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.576115 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.577019 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.577874 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2bbb2b1a-5cf9-497c-9471-13ba1314167b-config-data\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.578104 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.578714 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2bbb2b1a-5cf9-497c-9471-13ba1314167b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.578738 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2bbb2b1a-5cf9-497c-9471-13ba1314167b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.580712 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2bbb2b1a-5cf9-497c-9471-13ba1314167b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.582712 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.584889 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2bbb2b1a-5cf9-497c-9471-13ba1314167b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.589030 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2bbb2b1a-5cf9-497c-9471-13ba1314167b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.594276 4753 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.594321 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/b0f1c3c4ab4522717d685dbc018080e196fc39755508fe7e89724afa9df5553f/globalmount\"" pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.595373 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zmf8\" (UniqueName: \"kubernetes.io/projected/2bbb2b1a-5cf9-497c-9471-13ba1314167b-kube-api-access-4zmf8\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.661463 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-b8025f92-62ab-4c6b-8781-4ac1792398cb\") pod \"rabbitmq-server-0\" (UID: \"2bbb2b1a-5cf9-497c-9471-13ba1314167b\") " pod="openstack/rabbitmq-server-0" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.739081 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="36088f6e-7c3c-4fee-918e-e1ee91bf6b33" path="/var/lib/kubelet/pods/36088f6e-7c3c-4fee-918e-e1ee91bf6b33/volumes" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.742318 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5652c22-3bf2-454d-a4cf-fd0378f133b8" path="/var/lib/kubelet/pods/f5652c22-3bf2-454d-a4cf-fd0378f133b8/volumes" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.899023 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-lqvgb" Dec 05 17:38:21 crc kubenswrapper[4753]: I1205 17:38:21.908300 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:38:22 crc kubenswrapper[4753]: E1205 17:38:22.246934 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 05 17:38:22 crc kubenswrapper[4753]: E1205 17:38:22.246997 4753 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested" Dec 05 17:38:22 crc kubenswrapper[4753]: E1205 17:38:22.247205 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cloudkitty-db-sync,Image:quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CloudKittyPassword,Value:,ValueFrom:&EnvVarSource{FieldRef:nil,ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:&SecretKeySelector{LocalObjectReference:LocalObjectReference{Name:osp-secret,},Key:CloudKittyPassword,Optional:nil,},},},EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:cloudkitty-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:certs,ReadOnly:true,MountPath:/var/lib/openstack/loki-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5xrdb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42406,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cloudkitty-db-sync-9mzx8_openstack(d52c3870-a8b0-43f1-ac03-2e7c0015a5f7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:38:22 crc kubenswrapper[4753]: E1205 17:38:22.248404 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: 
context canceled\"" pod="openstack/cloudkitty-db-sync-9mzx8" podUID="d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" Dec 05 17:38:22 crc kubenswrapper[4753]: I1205 17:38:22.254426 4753 scope.go:117] "RemoveContainer" containerID="d2b7fe5d9b96c0331922682eb4b32643de6c7d8b758a699b13da54a8a9b025a1" Dec 05 17:38:22 crc kubenswrapper[4753]: I1205 17:38:22.425283 4753 scope.go:117] "RemoveContainer" containerID="10668d64fe2c712c2486402a9956161db31594987a12e3d235797dcb37d29bf6" Dec 05 17:38:22 crc kubenswrapper[4753]: I1205 17:38:22.483585 4753 scope.go:117] "RemoveContainer" containerID="771faf157830e014422c7e771db96a6f041aee98cdf73b6a9d7d87a972150a7c" Dec 05 17:38:22 crc kubenswrapper[4753]: I1205 17:38:22.805567 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-595979776c-hpkz9"] Dec 05 17:38:22 crc kubenswrapper[4753]: I1205 17:38:22.908898 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:38:22 crc kubenswrapper[4753]: I1205 17:38:22.926472 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:38:23 crc kubenswrapper[4753]: I1205 17:38:23.151843 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7","Type":"ContainerStarted","Data":"a5d1432024be194062bbb9f048b53a94540f4051ec2be552f0c145daf91b1292"} Dec 05 17:38:23 crc kubenswrapper[4753]: I1205 17:38:23.153683 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7a1f2600-cf85-45c5-8263-89810b0ba7ce","Type":"ContainerStarted","Data":"7422f4290287e53ba0f4ba1c604617bd52d6a04c1803cc727598ee7490863646"} Dec 05 17:38:23 crc kubenswrapper[4753]: I1205 17:38:23.154784 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-hpkz9" event={"ID":"75929efc-b524-4475-bcbc-e692e3353815","Type":"ContainerStarted","Data":"cdab1ffa5e50926b8ceb3c335da036dea51bb7da257b7991c9d6259708b117e0"} Dec 05 17:38:23 crc kubenswrapper[4753]: I1205 17:38:23.156843 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2bbb2b1a-5cf9-497c-9471-13ba1314167b","Type":"ContainerStarted","Data":"c14c82c83c131410da69941a42ee30f9bd814ec30b6caff434e187564236a7f5"} Dec 05 17:38:23 crc kubenswrapper[4753]: E1205 17:38:23.157893 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cloudkitty-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.rdoproject.org/podified-master-centos10/openstack-cloudkitty-api:current-tested\\\"\"" pod="openstack/cloudkitty-db-sync-9mzx8" podUID="d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" Dec 05 17:38:24 crc kubenswrapper[4753]: I1205 17:38:24.170500 4753 generic.go:334] "Generic (PLEG): container finished" podID="75929efc-b524-4475-bcbc-e692e3353815" containerID="138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa" exitCode=0 Dec 05 17:38:24 crc kubenswrapper[4753]: I1205 17:38:24.170606 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-hpkz9" event={"ID":"75929efc-b524-4475-bcbc-e692e3353815","Type":"ContainerDied","Data":"138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa"} Dec 05 17:38:24 crc kubenswrapper[4753]: I1205 17:38:24.176516 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7","Type":"ContainerStarted","Data":"1e5ae0c0438d89187744a40f9e053d8677537f29ced4c65c3bae253c86de54d7"} Dec 05 17:38:25 crc kubenswrapper[4753]: I1205 17:38:25.191953 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7","Type":"ContainerStarted","Data":"c37e74bb5a530b6d465824171bc8d6753ed190856f5a309c87cf53965de80211"} Dec 05 17:38:25 crc kubenswrapper[4753]: I1205 17:38:25.193979 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7a1f2600-cf85-45c5-8263-89810b0ba7ce","Type":"ContainerStarted","Data":"08aab619e28c9d9f7b357b93e6d17d7f57c3ac51b37d5ea5e1902e9d66da0084"} Dec 05 17:38:25 crc kubenswrapper[4753]: I1205 17:38:25.197939 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-hpkz9" event={"ID":"75929efc-b524-4475-bcbc-e692e3353815","Type":"ContainerStarted","Data":"3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64"} Dec 05 17:38:25 crc kubenswrapper[4753]: I1205 17:38:25.197998 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:25 crc kubenswrapper[4753]: I1205 17:38:25.201020 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2bbb2b1a-5cf9-497c-9471-13ba1314167b","Type":"ContainerStarted","Data":"072c97280a4dfad39a7dcaec367afb1acac50c5df2b9aac21a3b511ef7de664c"} Dec 05 17:38:25 crc kubenswrapper[4753]: I1205 17:38:25.267733 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-595979776c-hpkz9" podStartSLOduration=12.267715747 podStartE2EDuration="12.267715747s" podCreationTimestamp="2025-12-05 17:38:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:38:25.266216024 +0000 UTC m=+2043.769323050" watchObservedRunningTime="2025-12-05 17:38:25.267715747 +0000 UTC m=+2043.770822743" Dec 05 17:38:26 crc kubenswrapper[4753]: I1205 17:38:26.216658 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7","Type":"ContainerStarted","Data":"c8149aa6e3bcf603ff80dfac400b30216bd59ddce726c6defb80951609fa2c60"} Dec 05 17:38:26 crc kubenswrapper[4753]: I1205 17:38:26.252340 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.511513712 podStartE2EDuration="24.252318396s" podCreationTimestamp="2025-12-05 17:38:02 +0000 UTC" firstStartedPulling="2025-12-05 17:38:03.175251241 +0000 UTC m=+2021.678358247" lastFinishedPulling="2025-12-05 17:38:25.916055925 +0000 UTC m=+2044.419162931" observedRunningTime="2025-12-05 17:38:26.245057001 +0000 UTC m=+2044.748164017" watchObservedRunningTime="2025-12-05 17:38:26.252318396 +0000 UTC m=+2044.755425412" Dec 05 17:38:27 crc kubenswrapper[4753]: I1205 17:38:27.231641 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.082479 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.192635 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-zhgg5"] Dec 05 
17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.192958 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" podUID="79649179-aa76-48b8-ab8d-786db621db69" containerName="dnsmasq-dns" containerID="cri-o://126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4" gracePeriod=10 Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.353229 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5475ccd585-nfqd8"] Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.356760 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.396571 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5475ccd585-nfqd8"] Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.449296 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-config\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.449344 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-ovsdbserver-nb\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.449435 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-dns-swift-storage-0\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.449467 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-openstack-edpm-ipam\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.449481 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-ovsdbserver-sb\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.449554 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-dns-svc\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.449631 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtrk4\" (UniqueName: 
\"kubernetes.io/projected/d81db6a5-e580-4e70-92bb-437f1c03f5b4-kube-api-access-mtrk4\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.551895 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-config\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.551950 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-ovsdbserver-nb\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.552023 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-dns-swift-storage-0\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.552054 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-openstack-edpm-ipam\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.552071 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-ovsdbserver-sb\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.552116 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-dns-svc\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.552198 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtrk4\" (UniqueName: \"kubernetes.io/projected/d81db6a5-e580-4e70-92bb-437f1c03f5b4-kube-api-access-mtrk4\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.553257 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-config\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.553769 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-ovsdbserver-nb\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.554831 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-dns-swift-storage-0\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.555381 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-openstack-edpm-ipam\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.555871 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-ovsdbserver-sb\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.556379 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d81db6a5-e580-4e70-92bb-437f1c03f5b4-dns-svc\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.584600 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtrk4\" (UniqueName: \"kubernetes.io/projected/d81db6a5-e580-4e70-92bb-437f1c03f5b4-kube-api-access-mtrk4\") pod \"dnsmasq-dns-5475ccd585-nfqd8\" (UID: \"d81db6a5-e580-4e70-92bb-437f1c03f5b4\") " pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.732233 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:29 crc kubenswrapper[4753]: I1205 17:38:29.982492 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.092489 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-sb\") pod \"79649179-aa76-48b8-ab8d-786db621db69\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.092580 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-svc\") pod \"79649179-aa76-48b8-ab8d-786db621db69\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.092619 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-nb\") pod \"79649179-aa76-48b8-ab8d-786db621db69\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.092744 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-swift-storage-0\") pod \"79649179-aa76-48b8-ab8d-786db621db69\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.092799 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-config\") pod \"79649179-aa76-48b8-ab8d-786db621db69\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.092828 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s9hr4\" (UniqueName: \"kubernetes.io/projected/79649179-aa76-48b8-ab8d-786db621db69-kube-api-access-s9hr4\") pod \"79649179-aa76-48b8-ab8d-786db621db69\" (UID: \"79649179-aa76-48b8-ab8d-786db621db69\") " Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.099201 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79649179-aa76-48b8-ab8d-786db621db69-kube-api-access-s9hr4" (OuterVolumeSpecName: "kube-api-access-s9hr4") pod "79649179-aa76-48b8-ab8d-786db621db69" (UID: "79649179-aa76-48b8-ab8d-786db621db69"). InnerVolumeSpecName "kube-api-access-s9hr4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.161087 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "79649179-aa76-48b8-ab8d-786db621db69" (UID: "79649179-aa76-48b8-ab8d-786db621db69"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.166623 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "79649179-aa76-48b8-ab8d-786db621db69" (UID: "79649179-aa76-48b8-ab8d-786db621db69"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.177864 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "79649179-aa76-48b8-ab8d-786db621db69" (UID: "79649179-aa76-48b8-ab8d-786db621db69"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.196076 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s9hr4\" (UniqueName: \"kubernetes.io/projected/79649179-aa76-48b8-ab8d-786db621db69-kube-api-access-s9hr4\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.196386 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.196466 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.196537 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.201568 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "79649179-aa76-48b8-ab8d-786db621db69" (UID: "79649179-aa76-48b8-ab8d-786db621db69"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.205828 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-config" (OuterVolumeSpecName: "config") pod "79649179-aa76-48b8-ab8d-786db621db69" (UID: "79649179-aa76-48b8-ab8d-786db621db69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.264573 4753 generic.go:334] "Generic (PLEG): container finished" podID="79649179-aa76-48b8-ab8d-786db621db69" containerID="126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4" exitCode=0 Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.264629 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" event={"ID":"79649179-aa76-48b8-ab8d-786db621db69","Type":"ContainerDied","Data":"126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4"} Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.264664 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" event={"ID":"79649179-aa76-48b8-ab8d-786db621db69","Type":"ContainerDied","Data":"fe17b64d05c533329bada400ed5b45fbae229a2f4778bf2b1fde1ea9f83272ff"} Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.264684 4753 scope.go:117] "RemoveContainer" containerID="126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.264694 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78468d7767-zhgg5" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.286239 4753 scope.go:117] "RemoveContainer" containerID="cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.309402 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-zhgg5"] Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.310720 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.310737 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/79649179-aa76-48b8-ab8d-786db621db69-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.316444 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78468d7767-zhgg5"] Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.338971 4753 scope.go:117] "RemoveContainer" containerID="126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4" Dec 05 17:38:30 crc kubenswrapper[4753]: E1205 17:38:30.339662 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4\": container with ID starting with 126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4 not found: ID does not exist" containerID="126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.339799 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4"} err="failed to get container status \"126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4\": rpc error: code = NotFound desc = could not find container \"126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4\": container with ID starting with 126cfd06ee02ce79fdb9c54438451ffe3748e4ed90f0255d67087a16edc454b4 not found: ID 
does not exist" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.339921 4753 scope.go:117] "RemoveContainer" containerID="cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66" Dec 05 17:38:30 crc kubenswrapper[4753]: E1205 17:38:30.340621 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66\": container with ID starting with cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66 not found: ID does not exist" containerID="cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.340674 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66"} err="failed to get container status \"cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66\": rpc error: code = NotFound desc = could not find container \"cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66\": container with ID starting with cc1e143bf6f8894665b168b9dc2f27259daaaf5dc753daef4adabf6c55f79c66 not found: ID does not exist" Dec 05 17:38:30 crc kubenswrapper[4753]: I1205 17:38:30.376332 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5475ccd585-nfqd8"] Dec 05 17:38:31 crc kubenswrapper[4753]: I1205 17:38:31.281587 4753 generic.go:334] "Generic (PLEG): container finished" podID="d81db6a5-e580-4e70-92bb-437f1c03f5b4" containerID="a568a76805f93e80114047b1c709e644c0356b899acb8f6bace067ad7e50a749" exitCode=0 Dec 05 17:38:31 crc kubenswrapper[4753]: I1205 17:38:31.281687 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" event={"ID":"d81db6a5-e580-4e70-92bb-437f1c03f5b4","Type":"ContainerDied","Data":"a568a76805f93e80114047b1c709e644c0356b899acb8f6bace067ad7e50a749"} Dec 05 17:38:31 crc kubenswrapper[4753]: I1205 17:38:31.281916 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" event={"ID":"d81db6a5-e580-4e70-92bb-437f1c03f5b4","Type":"ContainerStarted","Data":"3ba5bf35d0b71b0c88e85a022380365ca9fb0f43280b10ac5832fa5defe3c230"} Dec 05 17:38:31 crc kubenswrapper[4753]: I1205 17:38:31.734666 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79649179-aa76-48b8-ab8d-786db621db69" path="/var/lib/kubelet/pods/79649179-aa76-48b8-ab8d-786db621db69/volumes" Dec 05 17:38:32 crc kubenswrapper[4753]: I1205 17:38:32.295057 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" event={"ID":"d81db6a5-e580-4e70-92bb-437f1c03f5b4","Type":"ContainerStarted","Data":"e9fca33db9a0654b2c8016c792b90b86d5e77bdeffc75b2e4f8cb8d13d2bfa5f"} Dec 05 17:38:32 crc kubenswrapper[4753]: I1205 17:38:32.295423 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:32 crc kubenswrapper[4753]: I1205 17:38:32.328708 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" podStartSLOduration=3.328677822 podStartE2EDuration="3.328677822s" podCreationTimestamp="2025-12-05 17:38:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:38:32.319379299 +0000 UTC m=+2050.822486325" 
watchObservedRunningTime="2025-12-05 17:38:32.328677822 +0000 UTC m=+2050.831784828" Dec 05 17:38:34 crc kubenswrapper[4753]: I1205 17:38:34.926684 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 17:38:35 crc kubenswrapper[4753]: I1205 17:38:35.325545 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-9mzx8" event={"ID":"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7","Type":"ContainerStarted","Data":"e44ccefb45d57b43018522f1a15e6bc64717eb635b58390e86a3a618e417f280"} Dec 05 17:38:35 crc kubenswrapper[4753]: I1205 17:38:35.350783 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-db-sync-9mzx8" podStartSLOduration=2.206003517 podStartE2EDuration="38.350760584s" podCreationTimestamp="2025-12-05 17:37:57 +0000 UTC" firstStartedPulling="2025-12-05 17:37:58.779441162 +0000 UTC m=+2017.282548188" lastFinishedPulling="2025-12-05 17:38:34.924198249 +0000 UTC m=+2053.427305255" observedRunningTime="2025-12-05 17:38:35.348309135 +0000 UTC m=+2053.851416151" watchObservedRunningTime="2025-12-05 17:38:35.350760584 +0000 UTC m=+2053.853867590" Dec 05 17:38:37 crc kubenswrapper[4753]: I1205 17:38:37.361950 4753 generic.go:334] "Generic (PLEG): container finished" podID="d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" containerID="e44ccefb45d57b43018522f1a15e6bc64717eb635b58390e86a3a618e417f280" exitCode=0 Dec 05 17:38:37 crc kubenswrapper[4753]: I1205 17:38:37.364031 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-9mzx8" event={"ID":"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7","Type":"ContainerDied","Data":"e44ccefb45d57b43018522f1a15e6bc64717eb635b58390e86a3a618e417f280"} Dec 05 17:38:38 crc kubenswrapper[4753]: I1205 17:38:38.963427 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.138146 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-combined-ca-bundle\") pod \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.138733 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-scripts\") pod \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.138994 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5xrdb\" (UniqueName: \"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-kube-api-access-5xrdb\") pod \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.139083 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-certs\") pod \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.139388 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-config-data\") pod \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\" (UID: \"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7\") " Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.145020 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-certs" (OuterVolumeSpecName: "certs") pod "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" (UID: "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.145267 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-scripts" (OuterVolumeSpecName: "scripts") pod "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" (UID: "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.146502 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-kube-api-access-5xrdb" (OuterVolumeSpecName: "kube-api-access-5xrdb") pod "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" (UID: "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7"). InnerVolumeSpecName "kube-api-access-5xrdb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.181199 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-config-data" (OuterVolumeSpecName: "config-data") pod "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" (UID: "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.210073 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" (UID: "d52c3870-a8b0-43f1-ac03-2e7c0015a5f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.243009 4753 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.243051 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.243066 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.243081 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.243096 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5xrdb\" (UniqueName: \"kubernetes.io/projected/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7-kube-api-access-5xrdb\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.387587 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-db-sync-9mzx8" event={"ID":"d52c3870-a8b0-43f1-ac03-2e7c0015a5f7","Type":"ContainerDied","Data":"3f4792723a1568baa512ca4bba6c954156e961aca55d171bc184ae10fde10017"} Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.387644 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f4792723a1568baa512ca4bba6c954156e961aca55d171bc184ae10fde10017" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.387739 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-db-sync-9mzx8" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.511765 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-k8xt5"] Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.525942 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-k8xt5"] Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.594045 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-storageinit-sf7pd"] Dec 05 17:38:39 crc kubenswrapper[4753]: E1205 17:38:39.594598 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" containerName="cloudkitty-db-sync" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.594616 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" containerName="cloudkitty-db-sync" Dec 05 17:38:39 crc kubenswrapper[4753]: E1205 17:38:39.594640 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79649179-aa76-48b8-ab8d-786db621db69" containerName="dnsmasq-dns" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.594646 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="79649179-aa76-48b8-ab8d-786db621db69" containerName="dnsmasq-dns" Dec 05 17:38:39 crc kubenswrapper[4753]: E1205 17:38:39.594668 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79649179-aa76-48b8-ab8d-786db621db69" containerName="init" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.594675 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="79649179-aa76-48b8-ab8d-786db621db69" containerName="init" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.594902 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="79649179-aa76-48b8-ab8d-786db621db69" containerName="dnsmasq-dns" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.594935 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" containerName="cloudkitty-db-sync" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.595895 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.598539 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.652508 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-sf7pd"] Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.736808 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="855d7063-d23a-4a28-a652-e78ae24fcc27" path="/var/lib/kubelet/pods/855d7063-d23a-4a28-a652-e78ae24fcc27/volumes" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.739341 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5475ccd585-nfqd8" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.754513 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-scripts\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.754733 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxkm6\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-kube-api-access-fxkm6\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.755000 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-combined-ca-bundle\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.755073 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-config-data\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.755312 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-certs\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.811858 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-595979776c-hpkz9"] Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.815641 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-595979776c-hpkz9" podUID="75929efc-b524-4475-bcbc-e692e3353815" containerName="dnsmasq-dns" containerID="cri-o://3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64" gracePeriod=10 Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.857743 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-scripts\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.858071 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxkm6\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-kube-api-access-fxkm6\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.858271 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-combined-ca-bundle\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.858392 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-config-data\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.858583 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-certs\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.862524 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-scripts\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.862894 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-combined-ca-bundle\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.864690 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-config-data\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.866473 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-certs\") pod \"cloudkitty-storageinit-sf7pd\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.877911 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxkm6\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-kube-api-access-fxkm6\") pod \"cloudkitty-storageinit-sf7pd\" (UID: 
\"369dd235-0113-4868-9146-9dcc60c10f63\") " pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:39 crc kubenswrapper[4753]: I1205 17:38:39.977434 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.315966 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.401021 4753 generic.go:334] "Generic (PLEG): container finished" podID="75929efc-b524-4475-bcbc-e692e3353815" containerID="3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64" exitCode=0 Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.401058 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-hpkz9" event={"ID":"75929efc-b524-4475-bcbc-e692e3353815","Type":"ContainerDied","Data":"3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64"} Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.401098 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-595979776c-hpkz9" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.401110 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-595979776c-hpkz9" event={"ID":"75929efc-b524-4475-bcbc-e692e3353815","Type":"ContainerDied","Data":"cdab1ffa5e50926b8ceb3c335da036dea51bb7da257b7991c9d6259708b117e0"} Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.401157 4753 scope.go:117] "RemoveContainer" containerID="3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.430255 4753 scope.go:117] "RemoveContainer" containerID="138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.456017 4753 scope.go:117] "RemoveContainer" containerID="3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64" Dec 05 17:38:40 crc kubenswrapper[4753]: E1205 17:38:40.456580 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64\": container with ID starting with 3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64 not found: ID does not exist" containerID="3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.456613 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64"} err="failed to get container status \"3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64\": rpc error: code = NotFound desc = could not find container \"3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64\": container with ID starting with 3dbc93f1d79974e31407fe13d86f71c94630043d5d9fe374b50bc19e0be24e64 not found: ID does not exist" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.456637 4753 scope.go:117] "RemoveContainer" containerID="138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa" Dec 05 17:38:40 crc kubenswrapper[4753]: E1205 17:38:40.456979 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa\": container with ID starting with 138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa not found: ID does not exist" containerID="138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.457003 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa"} err="failed to get container status \"138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa\": rpc error: code = NotFound desc = could not find container \"138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa\": container with ID starting with 138a606d100039c72808fa2764714cfc965ed77da936b51a5db53e56e3c4f9aa not found: ID does not exist" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.469959 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-svc\") pod \"75929efc-b524-4475-bcbc-e692e3353815\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.470051 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-sb\") pod \"75929efc-b524-4475-bcbc-e692e3353815\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.470258 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dmxr\" (UniqueName: \"kubernetes.io/projected/75929efc-b524-4475-bcbc-e692e3353815-kube-api-access-2dmxr\") pod \"75929efc-b524-4475-bcbc-e692e3353815\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.470307 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-swift-storage-0\") pod \"75929efc-b524-4475-bcbc-e692e3353815\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.470330 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-openstack-edpm-ipam\") pod \"75929efc-b524-4475-bcbc-e692e3353815\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.470350 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-config\") pod \"75929efc-b524-4475-bcbc-e692e3353815\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.470387 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-nb\") pod \"75929efc-b524-4475-bcbc-e692e3353815\" (UID: \"75929efc-b524-4475-bcbc-e692e3353815\") " Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.490909 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/75929efc-b524-4475-bcbc-e692e3353815-kube-api-access-2dmxr" (OuterVolumeSpecName: "kube-api-access-2dmxr") pod "75929efc-b524-4475-bcbc-e692e3353815" (UID: "75929efc-b524-4475-bcbc-e692e3353815"). InnerVolumeSpecName "kube-api-access-2dmxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.529749 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "75929efc-b524-4475-bcbc-e692e3353815" (UID: "75929efc-b524-4475-bcbc-e692e3353815"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.546278 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "75929efc-b524-4475-bcbc-e692e3353815" (UID: "75929efc-b524-4475-bcbc-e692e3353815"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.557302 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "75929efc-b524-4475-bcbc-e692e3353815" (UID: "75929efc-b524-4475-bcbc-e692e3353815"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.573070 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dmxr\" (UniqueName: \"kubernetes.io/projected/75929efc-b524-4475-bcbc-e692e3353815-kube-api-access-2dmxr\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.573102 4753 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.573111 4753 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.573120 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.573812 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "75929efc-b524-4475-bcbc-e692e3353815" (UID: "75929efc-b524-4475-bcbc-e692e3353815"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.577780 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-config" (OuterVolumeSpecName: "config") pod "75929efc-b524-4475-bcbc-e692e3353815" (UID: "75929efc-b524-4475-bcbc-e692e3353815"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.581592 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "75929efc-b524-4475-bcbc-e692e3353815" (UID: "75929efc-b524-4475-bcbc-e692e3353815"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:40 crc kubenswrapper[4753]: W1205 17:38:40.618442 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod369dd235_0113_4868_9146_9dcc60c10f63.slice/crio-1df897c1ed6e12b2e1fbaddea1be474231259dd32ad4b37af06c8e5bca5943c5 WatchSource:0}: Error finding container 1df897c1ed6e12b2e1fbaddea1be474231259dd32ad4b37af06c8e5bca5943c5: Status 404 returned error can't find the container with id 1df897c1ed6e12b2e1fbaddea1be474231259dd32ad4b37af06c8e5bca5943c5 Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.625131 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-storageinit-sf7pd"] Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.674594 4753 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.674630 4753 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.674641 4753 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/75929efc-b524-4475-bcbc-e692e3353815-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.762288 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-595979776c-hpkz9"] Dec 05 17:38:40 crc kubenswrapper[4753]: I1205 17:38:40.772545 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-595979776c-hpkz9"] Dec 05 17:38:41 crc kubenswrapper[4753]: I1205 17:38:41.413744 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-sf7pd" event={"ID":"369dd235-0113-4868-9146-9dcc60c10f63","Type":"ContainerStarted","Data":"417d313f097ad56274390b453a4c4d43754be2e637523d7ab673cff2812db7f0"} Dec 05 17:38:41 crc kubenswrapper[4753]: I1205 17:38:41.414005 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-sf7pd" event={"ID":"369dd235-0113-4868-9146-9dcc60c10f63","Type":"ContainerStarted","Data":"1df897c1ed6e12b2e1fbaddea1be474231259dd32ad4b37af06c8e5bca5943c5"} Dec 05 17:38:41 crc kubenswrapper[4753]: I1205 17:38:41.430969 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-storageinit-sf7pd" podStartSLOduration=2.4309494369999998 podStartE2EDuration="2.430949437s" podCreationTimestamp="2025-12-05 17:38:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:38:41.4299602 +0000 UTC m=+2059.933067206" watchObservedRunningTime="2025-12-05 17:38:41.430949437 +0000 UTC m=+2059.934056443" Dec 
05 17:38:41 crc kubenswrapper[4753]: I1205 17:38:41.747773 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75929efc-b524-4475-bcbc-e692e3353815" path="/var/lib/kubelet/pods/75929efc-b524-4475-bcbc-e692e3353815/volumes" Dec 05 17:38:43 crc kubenswrapper[4753]: I1205 17:38:43.444510 4753 generic.go:334] "Generic (PLEG): container finished" podID="369dd235-0113-4868-9146-9dcc60c10f63" containerID="417d313f097ad56274390b453a4c4d43754be2e637523d7ab673cff2812db7f0" exitCode=0 Dec 05 17:38:43 crc kubenswrapper[4753]: I1205 17:38:43.444620 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-sf7pd" event={"ID":"369dd235-0113-4868-9146-9dcc60c10f63","Type":"ContainerDied","Data":"417d313f097ad56274390b453a4c4d43754be2e637523d7ab673cff2812db7f0"} Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.040381 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.190090 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxkm6\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-kube-api-access-fxkm6\") pod \"369dd235-0113-4868-9146-9dcc60c10f63\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.190210 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-certs\") pod \"369dd235-0113-4868-9146-9dcc60c10f63\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.190370 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-config-data\") pod \"369dd235-0113-4868-9146-9dcc60c10f63\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.190391 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-combined-ca-bundle\") pod \"369dd235-0113-4868-9146-9dcc60c10f63\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.190500 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-scripts\") pod \"369dd235-0113-4868-9146-9dcc60c10f63\" (UID: \"369dd235-0113-4868-9146-9dcc60c10f63\") " Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.198199 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-scripts" (OuterVolumeSpecName: "scripts") pod "369dd235-0113-4868-9146-9dcc60c10f63" (UID: "369dd235-0113-4868-9146-9dcc60c10f63"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.199444 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-certs" (OuterVolumeSpecName: "certs") pod "369dd235-0113-4868-9146-9dcc60c10f63" (UID: "369dd235-0113-4868-9146-9dcc60c10f63"). InnerVolumeSpecName "certs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.220066 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-kube-api-access-fxkm6" (OuterVolumeSpecName: "kube-api-access-fxkm6") pod "369dd235-0113-4868-9146-9dcc60c10f63" (UID: "369dd235-0113-4868-9146-9dcc60c10f63"). InnerVolumeSpecName "kube-api-access-fxkm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.221057 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "369dd235-0113-4868-9146-9dcc60c10f63" (UID: "369dd235-0113-4868-9146-9dcc60c10f63"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.232018 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-config-data" (OuterVolumeSpecName: "config-data") pod "369dd235-0113-4868-9146-9dcc60c10f63" (UID: "369dd235-0113-4868-9146-9dcc60c10f63"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.293435 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxkm6\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-kube-api-access-fxkm6\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.293475 4753 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/369dd235-0113-4868-9146-9dcc60c10f63-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.293489 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.293501 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.293511 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/369dd235-0113-4868-9146-9dcc60c10f63-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.476267 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-storageinit-sf7pd" event={"ID":"369dd235-0113-4868-9146-9dcc60c10f63","Type":"ContainerDied","Data":"1df897c1ed6e12b2e1fbaddea1be474231259dd32ad4b37af06c8e5bca5943c5"} Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.476321 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-storageinit-sf7pd" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.476330 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1df897c1ed6e12b2e1fbaddea1be474231259dd32ad4b37af06c8e5bca5943c5" Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.591748 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.592349 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-proc-0" podUID="e3e74a0c-459c-4ecd-be3b-9eafb9656068" containerName="cloudkitty-proc" containerID="cri-o://04ab774fa9c185ad246330c2625e966fb597984af3ddf6a43f3b2cf495bc9d73" gracePeriod=30 Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.608468 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.608897 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerName="cloudkitty-api-log" containerID="cri-o://10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37" gracePeriod=30 Dec 05 17:38:45 crc kubenswrapper[4753]: I1205 17:38:45.608967 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cloudkitty-api-0" podUID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerName="cloudkitty-api" containerID="cri-o://ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3" gracePeriod=30 Dec 05 17:38:46 crc kubenswrapper[4753]: I1205 17:38:46.488175 4753 generic.go:334] "Generic (PLEG): container finished" podID="e3e74a0c-459c-4ecd-be3b-9eafb9656068" containerID="04ab774fa9c185ad246330c2625e966fb597984af3ddf6a43f3b2cf495bc9d73" exitCode=0 Dec 05 17:38:46 crc kubenswrapper[4753]: I1205 17:38:46.488260 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"e3e74a0c-459c-4ecd-be3b-9eafb9656068","Type":"ContainerDied","Data":"04ab774fa9c185ad246330c2625e966fb597984af3ddf6a43f3b2cf495bc9d73"} Dec 05 17:38:46 crc kubenswrapper[4753]: I1205 17:38:46.491051 4753 generic.go:334] "Generic (PLEG): container finished" podID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerID="10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37" exitCode=143 Dec 05 17:38:46 crc kubenswrapper[4753]: I1205 17:38:46.491076 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"cb0f0974-bb6e-4777-a4b0-bee542faf6b5","Type":"ContainerDied","Data":"10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37"} Dec 05 17:38:46 crc kubenswrapper[4753]: I1205 17:38:46.949320 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.026919 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data\") pod \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.026984 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-scripts\") pod \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.027105 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data-custom\") pod \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.027133 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-certs\") pod \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.027249 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-combined-ca-bundle\") pod \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.027315 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6dd97\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-kube-api-access-6dd97\") pod \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\" (UID: \"e3e74a0c-459c-4ecd-be3b-9eafb9656068\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.036989 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e3e74a0c-459c-4ecd-be3b-9eafb9656068" (UID: "e3e74a0c-459c-4ecd-be3b-9eafb9656068"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.038530 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-certs" (OuterVolumeSpecName: "certs") pod "e3e74a0c-459c-4ecd-be3b-9eafb9656068" (UID: "e3e74a0c-459c-4ecd-be3b-9eafb9656068"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.038822 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-scripts" (OuterVolumeSpecName: "scripts") pod "e3e74a0c-459c-4ecd-be3b-9eafb9656068" (UID: "e3e74a0c-459c-4ecd-be3b-9eafb9656068"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.068980 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.071462 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-kube-api-access-6dd97" (OuterVolumeSpecName: "kube-api-access-6dd97") pod "e3e74a0c-459c-4ecd-be3b-9eafb9656068" (UID: "e3e74a0c-459c-4ecd-be3b-9eafb9656068"). InnerVolumeSpecName "kube-api-access-6dd97". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.098762 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3e74a0c-459c-4ecd-be3b-9eafb9656068" (UID: "e3e74a0c-459c-4ecd-be3b-9eafb9656068"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.104529 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data" (OuterVolumeSpecName: "config-data") pod "e3e74a0c-459c-4ecd-be3b-9eafb9656068" (UID: "e3e74a0c-459c-4ecd-be3b-9eafb9656068"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.131493 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-scripts\") pod \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.131680 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data\") pod \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.131722 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-public-tls-certs\") pod \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.131801 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-certs\") pod \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.131844 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbm96\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-kube-api-access-lbm96\") pod \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.131916 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-combined-ca-bundle\") pod \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.131960 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-internal-tls-certs\") pod \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.131986 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data-custom\") pod \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.132035 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-logs\") pod \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\" (UID: \"cb0f0974-bb6e-4777-a4b0-bee542faf6b5\") " Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.133203 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-logs" (OuterVolumeSpecName: "logs") pod "cb0f0974-bb6e-4777-a4b0-bee542faf6b5" (UID: "cb0f0974-bb6e-4777-a4b0-bee542faf6b5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.133876 4753 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.133908 4753 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.133916 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.133925 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6dd97\" (UniqueName: \"kubernetes.io/projected/e3e74a0c-459c-4ecd-be3b-9eafb9656068-kube-api-access-6dd97\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.133938 4753 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.133948 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.133956 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e3e74a0c-459c-4ecd-be3b-9eafb9656068-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.135802 4753 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-certs" (OuterVolumeSpecName: "certs") pod "cb0f0974-bb6e-4777-a4b0-bee542faf6b5" (UID: "cb0f0974-bb6e-4777-a4b0-bee542faf6b5"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.145226 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-kube-api-access-lbm96" (OuterVolumeSpecName: "kube-api-access-lbm96") pod "cb0f0974-bb6e-4777-a4b0-bee542faf6b5" (UID: "cb0f0974-bb6e-4777-a4b0-bee542faf6b5"). InnerVolumeSpecName "kube-api-access-lbm96". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.148105 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-scripts" (OuterVolumeSpecName: "scripts") pod "cb0f0974-bb6e-4777-a4b0-bee542faf6b5" (UID: "cb0f0974-bb6e-4777-a4b0-bee542faf6b5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.148557 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "cb0f0974-bb6e-4777-a4b0-bee542faf6b5" (UID: "cb0f0974-bb6e-4777-a4b0-bee542faf6b5"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.184497 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cb0f0974-bb6e-4777-a4b0-bee542faf6b5" (UID: "cb0f0974-bb6e-4777-a4b0-bee542faf6b5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.205032 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data" (OuterVolumeSpecName: "config-data") pod "cb0f0974-bb6e-4777-a4b0-bee542faf6b5" (UID: "cb0f0974-bb6e-4777-a4b0-bee542faf6b5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.228900 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "cb0f0974-bb6e-4777-a4b0-bee542faf6b5" (UID: "cb0f0974-bb6e-4777-a4b0-bee542faf6b5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.229247 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "cb0f0974-bb6e-4777-a4b0-bee542faf6b5" (UID: "cb0f0974-bb6e-4777-a4b0-bee542faf6b5"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.235766 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.235799 4753 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.235811 4753 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.235821 4753 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.235829 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.235837 4753 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.235845 4753 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.235853 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbm96\" (UniqueName: \"kubernetes.io/projected/cb0f0974-bb6e-4777-a4b0-bee542faf6b5-kube-api-access-lbm96\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.504039 4753 generic.go:334] "Generic (PLEG): container finished" podID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerID="ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3" exitCode=0 Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.504094 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"cb0f0974-bb6e-4777-a4b0-bee542faf6b5","Type":"ContainerDied","Data":"ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3"} Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.504119 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.504189 4753 scope.go:117] "RemoveContainer" containerID="ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.504174 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"cb0f0974-bb6e-4777-a4b0-bee542faf6b5","Type":"ContainerDied","Data":"ae9458ae39c2edb69902ab07769b3fa45d8de44ef451ad335063ffaeddba4474"} Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.506823 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"e3e74a0c-459c-4ecd-be3b-9eafb9656068","Type":"ContainerDied","Data":"95625e0027f053715e98bf27aaf2c6c9f783f2ffd15a6bde5b0bdcfe7b3d3a40"} Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.506912 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.530037 4753 scope.go:117] "RemoveContainer" containerID="10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.547541 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.559742 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.573558 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.577515 4753 scope.go:117] "RemoveContainer" containerID="ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3" Dec 05 17:38:47 crc kubenswrapper[4753]: E1205 17:38:47.580626 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3\": container with ID starting with ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3 not found: ID does not exist" containerID="ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.580663 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3"} err="failed to get container status \"ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3\": rpc error: code = NotFound desc = could not find container \"ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3\": container with ID starting with ffbbf829573ca5b64ede67e0203bb54f37b12f61186c5cfd7954ba5df79a7cb3 not found: ID does not exist" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.580691 4753 scope.go:117] "RemoveContainer" containerID="10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37" Dec 05 17:38:47 crc kubenswrapper[4753]: E1205 17:38:47.581117 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37\": container with ID starting with 10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37 not found: ID does not exist" containerID="10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37" Dec 05 17:38:47 crc 
kubenswrapper[4753]: I1205 17:38:47.581303 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37"} err="failed to get container status \"10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37\": rpc error: code = NotFound desc = could not find container \"10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37\": container with ID starting with 10dbbb352ffa630c5c380ecaffb8af92854cefeb746affb85588e507d0d71f37 not found: ID does not exist" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.581443 4753 scope.go:117] "RemoveContainer" containerID="04ab774fa9c185ad246330c2625e966fb597984af3ddf6a43f3b2cf495bc9d73" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.606225 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:38:47 crc kubenswrapper[4753]: E1205 17:38:47.612725 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3e74a0c-459c-4ecd-be3b-9eafb9656068" containerName="cloudkitty-proc" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.612776 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3e74a0c-459c-4ecd-be3b-9eafb9656068" containerName="cloudkitty-proc" Dec 05 17:38:47 crc kubenswrapper[4753]: E1205 17:38:47.612913 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerName="cloudkitty-api-log" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.612927 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerName="cloudkitty-api-log" Dec 05 17:38:47 crc kubenswrapper[4753]: E1205 17:38:47.612953 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="369dd235-0113-4868-9146-9dcc60c10f63" containerName="cloudkitty-storageinit" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.612963 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="369dd235-0113-4868-9146-9dcc60c10f63" containerName="cloudkitty-storageinit" Dec 05 17:38:47 crc kubenswrapper[4753]: E1205 17:38:47.613007 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75929efc-b524-4475-bcbc-e692e3353815" containerName="dnsmasq-dns" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.613016 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="75929efc-b524-4475-bcbc-e692e3353815" containerName="dnsmasq-dns" Dec 05 17:38:47 crc kubenswrapper[4753]: E1205 17:38:47.613048 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerName="cloudkitty-api" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.613057 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerName="cloudkitty-api" Dec 05 17:38:47 crc kubenswrapper[4753]: E1205 17:38:47.613073 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75929efc-b524-4475-bcbc-e692e3353815" containerName="init" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.613094 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="75929efc-b524-4475-bcbc-e692e3353815" containerName="init" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.615979 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="369dd235-0113-4868-9146-9dcc60c10f63" containerName="cloudkitty-storageinit" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.616035 4753 
memory_manager.go:354] "RemoveStaleState removing state" podUID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerName="cloudkitty-api" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.616058 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="75929efc-b524-4475-bcbc-e692e3353815" containerName="dnsmasq-dns" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.657255 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3e74a0c-459c-4ecd-be3b-9eafb9656068" containerName="cloudkitty-proc" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.657298 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" containerName="cloudkitty-api-log" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.660382 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.667592 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-public-svc" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.667683 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-config-data" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.667940 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-internal-svc" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.681640 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-cloudkitty-dockercfg-45bq2" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.681886 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-scripts" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.682113 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-api-config-data" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.705347 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cloudkitty-client-internal" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.713362 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.773850 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb0f0974-bb6e-4777-a4b0-bee542faf6b5" path="/var/lib/kubelet/pods/cb0f0974-bb6e-4777-a4b0-bee542faf6b5/volumes" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.774639 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3e74a0c-459c-4ecd-be3b-9eafb9656068" path="/var/lib/kubelet/pods/e3e74a0c-459c-4ecd-be3b-9eafb9656068/volumes" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.786318 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.826217 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.827669 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.831045 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cloudkitty-proc-config-data" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.873219 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.878541 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-857qn\" (UniqueName: \"kubernetes.io/projected/5f627cef-fbe5-40ef-beeb-e3b08861c449-kube-api-access-857qn\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.878658 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f627cef-fbe5-40ef-beeb-e3b08861c449-logs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.878756 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.878787 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.878830 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-config-data\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.878847 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5f627cef-fbe5-40ef-beeb-e3b08861c449-certs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.878883 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.878924 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-scripts\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.878956 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981314 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981434 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7x9r\" (UniqueName: \"kubernetes.io/projected/11174666-3e97-47ce-90f5-55ee37dddf75-kube-api-access-r7x9r\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981462 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-857qn\" (UniqueName: \"kubernetes.io/projected/5f627cef-fbe5-40ef-beeb-e3b08861c449-kube-api-access-857qn\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981486 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981525 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-config-data\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981561 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f627cef-fbe5-40ef-beeb-e3b08861c449-logs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981623 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/11174666-3e97-47ce-90f5-55ee37dddf75-certs\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981658 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981702 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-scripts\") pod 
\"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981743 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981777 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-config-data\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981816 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981839 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5f627cef-fbe5-40ef-beeb-e3b08861c449-certs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981879 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.981912 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-scripts\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.984643 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5f627cef-fbe5-40ef-beeb-e3b08861c449-logs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.987607 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-scripts\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.988094 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-public-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.988301 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-config-data\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.990090 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-internal-tls-certs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.991489 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/5f627cef-fbe5-40ef-beeb-e3b08861c449-certs\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.991529 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-config-data-custom\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:47 crc kubenswrapper[4753]: I1205 17:38:47.994630 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f627cef-fbe5-40ef-beeb-e3b08861c449-combined-ca-bundle\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.000381 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-857qn\" (UniqueName: \"kubernetes.io/projected/5f627cef-fbe5-40ef-beeb-e3b08861c449-kube-api-access-857qn\") pod \"cloudkitty-api-0\" (UID: \"5f627cef-fbe5-40ef-beeb-e3b08861c449\") " pod="openstack/cloudkitty-api-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.064648 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cloudkitty-api-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.083673 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/projected/11174666-3e97-47ce-90f5-55ee37dddf75-certs\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.083763 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-scripts\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.083815 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.083948 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7x9r\" (UniqueName: \"kubernetes.io/projected/11174666-3e97-47ce-90f5-55ee37dddf75-kube-api-access-r7x9r\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.083975 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-config-data-custom\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.084014 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-config-data\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.089255 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-config-data\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.089270 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/projected/11174666-3e97-47ce-90f5-55ee37dddf75-certs\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.089752 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-scripts\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.097963 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-config-data-custom\") pod \"cloudkitty-proc-0\" 
(UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.098005 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/11174666-3e97-47ce-90f5-55ee37dddf75-combined-ca-bundle\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.109920 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7x9r\" (UniqueName: \"kubernetes.io/projected/11174666-3e97-47ce-90f5-55ee37dddf75-kube-api-access-r7x9r\") pod \"cloudkitty-proc-0\" (UID: \"11174666-3e97-47ce-90f5-55ee37dddf75\") " pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.152855 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cloudkitty-proc-0" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.259750 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2"] Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.264431 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.267528 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.267948 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.268196 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.268407 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.275087 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2"] Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.396508 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.396805 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.396908 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: 
\"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.396947 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csjxb\" (UniqueName: \"kubernetes.io/projected/d4b7df21-a189-41c8-9e93-c43d0eb552c5-kube-api-access-csjxb\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.498668 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.498753 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.498780 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csjxb\" (UniqueName: \"kubernetes.io/projected/d4b7df21-a189-41c8-9e93-c43d0eb552c5-kube-api-access-csjxb\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.498903 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.503410 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.503986 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.513000 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") 
" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.522442 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csjxb\" (UniqueName: \"kubernetes.io/projected/d4b7df21-a189-41c8-9e93-c43d0eb552c5-kube-api-access-csjxb\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.600582 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.638055 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-api-0"] Dec 05 17:38:48 crc kubenswrapper[4753]: W1205 17:38:48.651032 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f627cef_fbe5_40ef_beeb_e3b08861c449.slice/crio-637d0fa0b092e7aadda852493c76dc7e15c9dd5d909d23bc5900216a1a5ebe62 WatchSource:0}: Error finding container 637d0fa0b092e7aadda852493c76dc7e15c9dd5d909d23bc5900216a1a5ebe62: Status 404 returned error can't find the container with id 637d0fa0b092e7aadda852493c76dc7e15c9dd5d909d23bc5900216a1a5ebe62 Dec 05 17:38:48 crc kubenswrapper[4753]: I1205 17:38:48.751761 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cloudkitty-proc-0"] Dec 05 17:38:49 crc kubenswrapper[4753]: I1205 17:38:49.238805 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2"] Dec 05 17:38:49 crc kubenswrapper[4753]: W1205 17:38:49.241367 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4b7df21_a189_41c8_9e93_c43d0eb552c5.slice/crio-0087cd36f4cba2ed1f7615b074a05515a148d85e296df73af848a6a0b6c07b0b WatchSource:0}: Error finding container 0087cd36f4cba2ed1f7615b074a05515a148d85e296df73af848a6a0b6c07b0b: Status 404 returned error can't find the container with id 0087cd36f4cba2ed1f7615b074a05515a148d85e296df73af848a6a0b6c07b0b Dec 05 17:38:49 crc kubenswrapper[4753]: I1205 17:38:49.555757 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" event={"ID":"d4b7df21-a189-41c8-9e93-c43d0eb552c5","Type":"ContainerStarted","Data":"0087cd36f4cba2ed1f7615b074a05515a148d85e296df73af848a6a0b6c07b0b"} Dec 05 17:38:49 crc kubenswrapper[4753]: I1205 17:38:49.559589 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5f627cef-fbe5-40ef-beeb-e3b08861c449","Type":"ContainerStarted","Data":"97cdd908a1e3d53f60a6b14780087eec2ed86da5a1725b1c88c3199e8104e12f"} Dec 05 17:38:49 crc kubenswrapper[4753]: I1205 17:38:49.559623 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5f627cef-fbe5-40ef-beeb-e3b08861c449","Type":"ContainerStarted","Data":"93047a2b20eba649708679dc5c7f764067b63076295c2824ce8f837a0aec61f2"} Dec 05 17:38:49 crc kubenswrapper[4753]: I1205 17:38:49.559633 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-api-0" event={"ID":"5f627cef-fbe5-40ef-beeb-e3b08861c449","Type":"ContainerStarted","Data":"637d0fa0b092e7aadda852493c76dc7e15c9dd5d909d23bc5900216a1a5ebe62"} Dec 05 
17:38:49 crc kubenswrapper[4753]: I1205 17:38:49.559797 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cloudkitty-api-0" Dec 05 17:38:49 crc kubenswrapper[4753]: I1205 17:38:49.562037 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"11174666-3e97-47ce-90f5-55ee37dddf75","Type":"ContainerStarted","Data":"f393020d428b1132bbbde541bff6cac5a1e7b372d4fbffcb6f792c9c3a33a0e8"} Dec 05 17:38:49 crc kubenswrapper[4753]: I1205 17:38:49.587245 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-api-0" podStartSLOduration=2.5872279750000002 podStartE2EDuration="2.587227975s" podCreationTimestamp="2025-12-05 17:38:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:38:49.582650896 +0000 UTC m=+2068.085757902" watchObservedRunningTime="2025-12-05 17:38:49.587227975 +0000 UTC m=+2068.090334981" Dec 05 17:38:50 crc kubenswrapper[4753]: I1205 17:38:50.578839 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cloudkitty-proc-0" event={"ID":"11174666-3e97-47ce-90f5-55ee37dddf75","Type":"ContainerStarted","Data":"5cac3a19f6c031bb8ff6d1d163cb6fbf81fd85533ee495f692861450a12c6dd9"} Dec 05 17:38:50 crc kubenswrapper[4753]: I1205 17:38:50.610496 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cloudkitty-proc-0" podStartSLOduration=2.527426214 podStartE2EDuration="3.610475119s" podCreationTimestamp="2025-12-05 17:38:47 +0000 UTC" firstStartedPulling="2025-12-05 17:38:48.78701047 +0000 UTC m=+2067.290117476" lastFinishedPulling="2025-12-05 17:38:49.870059375 +0000 UTC m=+2068.373166381" observedRunningTime="2025-12-05 17:38:50.596209326 +0000 UTC m=+2069.099316332" watchObservedRunningTime="2025-12-05 17:38:50.610475119 +0000 UTC m=+2069.113582125" Dec 05 17:38:57 crc kubenswrapper[4753]: I1205 17:38:57.699414 4753 generic.go:334] "Generic (PLEG): container finished" podID="7a1f2600-cf85-45c5-8263-89810b0ba7ce" containerID="08aab619e28c9d9f7b357b93e6d17d7f57c3ac51b37d5ea5e1902e9d66da0084" exitCode=0 Dec 05 17:38:57 crc kubenswrapper[4753]: I1205 17:38:57.699527 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7a1f2600-cf85-45c5-8263-89810b0ba7ce","Type":"ContainerDied","Data":"08aab619e28c9d9f7b357b93e6d17d7f57c3ac51b37d5ea5e1902e9d66da0084"} Dec 05 17:38:57 crc kubenswrapper[4753]: I1205 17:38:57.702391 4753 generic.go:334] "Generic (PLEG): container finished" podID="2bbb2b1a-5cf9-497c-9471-13ba1314167b" containerID="072c97280a4dfad39a7dcaec367afb1acac50c5df2b9aac21a3b511ef7de664c" exitCode=0 Dec 05 17:38:57 crc kubenswrapper[4753]: I1205 17:38:57.702439 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2bbb2b1a-5cf9-497c-9471-13ba1314167b","Type":"ContainerDied","Data":"072c97280a4dfad39a7dcaec367afb1acac50c5df2b9aac21a3b511ef7de664c"} Dec 05 17:39:02 crc kubenswrapper[4753]: I1205 17:39:02.526617 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 17:39:02 crc kubenswrapper[4753]: I1205 17:39:02.766993 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7a1f2600-cf85-45c5-8263-89810b0ba7ce","Type":"ContainerStarted","Data":"13d23714a9d32be1c1da97ab67abe2be7f39b7f5b82968bb977df035251e67fc"} Dec 05 
17:39:02 crc kubenswrapper[4753]: I1205 17:39:02.768793 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:39:02 crc kubenswrapper[4753]: I1205 17:39:02.771867 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"2bbb2b1a-5cf9-497c-9471-13ba1314167b","Type":"ContainerStarted","Data":"7f47798b83bec4745dc918b6f6bfdf21fff8ca4fc8ea9753b307103eb5564a0f"} Dec 05 17:39:02 crc kubenswrapper[4753]: I1205 17:39:02.772642 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 17:39:02 crc kubenswrapper[4753]: I1205 17:39:02.775212 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" event={"ID":"d4b7df21-a189-41c8-9e93-c43d0eb552c5","Type":"ContainerStarted","Data":"5c9d494abe3e6475a1f375b69b805df5ab55a994b21bd41905018d92d383ff1b"} Dec 05 17:39:02 crc kubenswrapper[4753]: I1205 17:39:02.812013 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=41.811992714 podStartE2EDuration="41.811992714s" podCreationTimestamp="2025-12-05 17:38:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:39:02.803140504 +0000 UTC m=+2081.306247510" watchObservedRunningTime="2025-12-05 17:39:02.811992714 +0000 UTC m=+2081.315099710" Dec 05 17:39:02 crc kubenswrapper[4753]: I1205 17:39:02.852468 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=41.852432297 podStartE2EDuration="41.852432297s" podCreationTimestamp="2025-12-05 17:38:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:39:02.84298462 +0000 UTC m=+2081.346091646" watchObservedRunningTime="2025-12-05 17:39:02.852432297 +0000 UTC m=+2081.355539413" Dec 05 17:39:02 crc kubenswrapper[4753]: I1205 17:39:02.876492 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" podStartSLOduration=2.477591934 podStartE2EDuration="14.876468037s" podCreationTimestamp="2025-12-05 17:38:48 +0000 UTC" firstStartedPulling="2025-12-05 17:38:49.245087827 +0000 UTC m=+2067.748194833" lastFinishedPulling="2025-12-05 17:39:01.64396393 +0000 UTC m=+2080.147070936" observedRunningTime="2025-12-05 17:39:02.865763244 +0000 UTC m=+2081.368870280" watchObservedRunningTime="2025-12-05 17:39:02.876468037 +0000 UTC m=+2081.379575063" Dec 05 17:39:11 crc kubenswrapper[4753]: I1205 17:39:11.583387 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:39:11 crc kubenswrapper[4753]: I1205 17:39:11.912896 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 17:39:14 crc kubenswrapper[4753]: I1205 17:39:14.913567 4753 generic.go:334] "Generic (PLEG): container finished" podID="d4b7df21-a189-41c8-9e93-c43d0eb552c5" containerID="5c9d494abe3e6475a1f375b69b805df5ab55a994b21bd41905018d92d383ff1b" exitCode=0 Dec 05 17:39:14 crc kubenswrapper[4753]: I1205 17:39:14.913841 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" 
event={"ID":"d4b7df21-a189-41c8-9e93-c43d0eb552c5","Type":"ContainerDied","Data":"5c9d494abe3e6475a1f375b69b805df5ab55a994b21bd41905018d92d383ff1b"} Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.595738 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.678665 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-repo-setup-combined-ca-bundle\") pod \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.678817 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-inventory\") pod \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.678843 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csjxb\" (UniqueName: \"kubernetes.io/projected/d4b7df21-a189-41c8-9e93-c43d0eb552c5-kube-api-access-csjxb\") pod \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.678860 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-ssh-key\") pod \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\" (UID: \"d4b7df21-a189-41c8-9e93-c43d0eb552c5\") " Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.684936 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "d4b7df21-a189-41c8-9e93-c43d0eb552c5" (UID: "d4b7df21-a189-41c8-9e93-c43d0eb552c5"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.689877 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4b7df21-a189-41c8-9e93-c43d0eb552c5-kube-api-access-csjxb" (OuterVolumeSpecName: "kube-api-access-csjxb") pod "d4b7df21-a189-41c8-9e93-c43d0eb552c5" (UID: "d4b7df21-a189-41c8-9e93-c43d0eb552c5"). InnerVolumeSpecName "kube-api-access-csjxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.709534 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-inventory" (OuterVolumeSpecName: "inventory") pod "d4b7df21-a189-41c8-9e93-c43d0eb552c5" (UID: "d4b7df21-a189-41c8-9e93-c43d0eb552c5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.732891 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d4b7df21-a189-41c8-9e93-c43d0eb552c5" (UID: "d4b7df21-a189-41c8-9e93-c43d0eb552c5"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.787514 4753 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.787555 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.787568 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csjxb\" (UniqueName: \"kubernetes.io/projected/d4b7df21-a189-41c8-9e93-c43d0eb552c5-kube-api-access-csjxb\") on node \"crc\" DevicePath \"\"" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.787579 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4b7df21-a189-41c8-9e93-c43d0eb552c5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.948264 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" event={"ID":"d4b7df21-a189-41c8-9e93-c43d0eb552c5","Type":"ContainerDied","Data":"0087cd36f4cba2ed1f7615b074a05515a148d85e296df73af848a6a0b6c07b0b"} Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.948309 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0087cd36f4cba2ed1f7615b074a05515a148d85e296df73af848a6a0b6c07b0b" Dec 05 17:39:16 crc kubenswrapper[4753]: I1205 17:39:16.948383 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.058262 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657"] Dec 05 17:39:17 crc kubenswrapper[4753]: E1205 17:39:17.058974 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4b7df21-a189-41c8-9e93-c43d0eb552c5" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.058993 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4b7df21-a189-41c8-9e93-c43d0eb552c5" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.059255 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4b7df21-a189-41c8-9e93-c43d0eb552c5" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.060029 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.065663 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.065950 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.066082 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.066289 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.068657 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657"] Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.096159 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-k2657\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.096232 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-k2657\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.096257 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwrsf\" (UniqueName: \"kubernetes.io/projected/228dc9b7-ee9e-48b8-bf86-f4265863f94a-kube-api-access-vwrsf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-k2657\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.197857 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-k2657\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.197903 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-k2657\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.197925 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwrsf\" (UniqueName: \"kubernetes.io/projected/228dc9b7-ee9e-48b8-bf86-f4265863f94a-kube-api-access-vwrsf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-k2657\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.201927 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-k2657\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.206809 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-k2657\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.215886 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwrsf\" (UniqueName: \"kubernetes.io/projected/228dc9b7-ee9e-48b8-bf86-f4265863f94a-kube-api-access-vwrsf\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-k2657\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.377669 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:17 crc kubenswrapper[4753]: I1205 17:39:17.954646 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657"] Dec 05 17:39:18 crc kubenswrapper[4753]: I1205 17:39:18.974852 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" event={"ID":"228dc9b7-ee9e-48b8-bf86-f4265863f94a","Type":"ContainerStarted","Data":"be4026f63fa86241a487786e0e2cdc01414684772f06ff881e9b06032392c192"} Dec 05 17:39:18 crc kubenswrapper[4753]: I1205 17:39:18.975222 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" event={"ID":"228dc9b7-ee9e-48b8-bf86-f4265863f94a","Type":"ContainerStarted","Data":"c5ded37fb9af0cd5e5443a92515b3b87b598b79ff7137b752fd5c72d4eb2fb78"} Dec 05 17:39:18 crc kubenswrapper[4753]: I1205 17:39:18.998534 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" podStartSLOduration=1.582040406 podStartE2EDuration="1.998516298s" podCreationTimestamp="2025-12-05 17:39:17 +0000 UTC" firstStartedPulling="2025-12-05 17:39:17.955840206 +0000 UTC m=+2096.458947222" lastFinishedPulling="2025-12-05 17:39:18.372316108 +0000 UTC m=+2096.875423114" observedRunningTime="2025-12-05 17:39:18.990188052 +0000 UTC m=+2097.493295068" watchObservedRunningTime="2025-12-05 17:39:18.998516298 +0000 UTC m=+2097.501623304" Dec 05 17:39:22 crc kubenswrapper[4753]: I1205 17:39:22.007322 4753 generic.go:334] "Generic (PLEG): container finished" podID="228dc9b7-ee9e-48b8-bf86-f4265863f94a" containerID="be4026f63fa86241a487786e0e2cdc01414684772f06ff881e9b06032392c192" exitCode=0 Dec 05 17:39:22 crc kubenswrapper[4753]: I1205 17:39:22.007850 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" 
event={"ID":"228dc9b7-ee9e-48b8-bf86-f4265863f94a","Type":"ContainerDied","Data":"be4026f63fa86241a487786e0e2cdc01414684772f06ff881e9b06032392c192"} Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.565851 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.651834 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwrsf\" (UniqueName: \"kubernetes.io/projected/228dc9b7-ee9e-48b8-bf86-f4265863f94a-kube-api-access-vwrsf\") pod \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.652096 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-inventory\") pod \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.652161 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-ssh-key\") pod \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.689392 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/228dc9b7-ee9e-48b8-bf86-f4265863f94a-kube-api-access-vwrsf" (OuterVolumeSpecName: "kube-api-access-vwrsf") pod "228dc9b7-ee9e-48b8-bf86-f4265863f94a" (UID: "228dc9b7-ee9e-48b8-bf86-f4265863f94a"). InnerVolumeSpecName "kube-api-access-vwrsf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.753901 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-inventory" (OuterVolumeSpecName: "inventory") pod "228dc9b7-ee9e-48b8-bf86-f4265863f94a" (UID: "228dc9b7-ee9e-48b8-bf86-f4265863f94a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.754332 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-inventory\") pod \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\" (UID: \"228dc9b7-ee9e-48b8-bf86-f4265863f94a\") " Dec 05 17:39:23 crc kubenswrapper[4753]: W1205 17:39:23.754602 4753 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/228dc9b7-ee9e-48b8-bf86-f4265863f94a/volumes/kubernetes.io~secret/inventory Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.754700 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-inventory" (OuterVolumeSpecName: "inventory") pod "228dc9b7-ee9e-48b8-bf86-f4265863f94a" (UID: "228dc9b7-ee9e-48b8-bf86-f4265863f94a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.755980 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.756010 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwrsf\" (UniqueName: \"kubernetes.io/projected/228dc9b7-ee9e-48b8-bf86-f4265863f94a-kube-api-access-vwrsf\") on node \"crc\" DevicePath \"\"" Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.760356 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "228dc9b7-ee9e-48b8-bf86-f4265863f94a" (UID: "228dc9b7-ee9e-48b8-bf86-f4265863f94a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:39:23 crc kubenswrapper[4753]: I1205 17:39:23.858239 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/228dc9b7-ee9e-48b8-bf86-f4265863f94a-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.042393 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" event={"ID":"228dc9b7-ee9e-48b8-bf86-f4265863f94a","Type":"ContainerDied","Data":"c5ded37fb9af0cd5e5443a92515b3b87b598b79ff7137b752fd5c72d4eb2fb78"} Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.042629 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5ded37fb9af0cd5e5443a92515b3b87b598b79ff7137b752fd5c72d4eb2fb78" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.042518 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-k2657" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.122716 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn"] Dec 05 17:39:24 crc kubenswrapper[4753]: E1205 17:39:24.123464 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="228dc9b7-ee9e-48b8-bf86-f4265863f94a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.123496 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="228dc9b7-ee9e-48b8-bf86-f4265863f94a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.123870 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="228dc9b7-ee9e-48b8-bf86-f4265863f94a" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.125133 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.128025 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.128265 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.128408 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.128569 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.139429 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn"] Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.275244 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.275476 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqmtr\" (UniqueName: \"kubernetes.io/projected/a40f9ec8-5379-4355-b524-fed440fdf2d6-kube-api-access-cqmtr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.275609 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.275654 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.377863 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqmtr\" (UniqueName: \"kubernetes.io/projected/a40f9ec8-5379-4355-b524-fed440fdf2d6-kube-api-access-cqmtr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.377951 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-bootstrap-combined-ca-bundle\") 
pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.377977 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.378061 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.383079 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.383535 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.384116 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.396669 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqmtr\" (UniqueName: \"kubernetes.io/projected/a40f9ec8-5379-4355-b524-fed440fdf2d6-kube-api-access-cqmtr\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:24 crc kubenswrapper[4753]: I1205 17:39:24.453520 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:39:25 crc kubenswrapper[4753]: I1205 17:39:25.010479 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cloudkitty-api-0" Dec 05 17:39:25 crc kubenswrapper[4753]: I1205 17:39:25.211096 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn"] Dec 05 17:39:26 crc kubenswrapper[4753]: I1205 17:39:26.064847 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" event={"ID":"a40f9ec8-5379-4355-b524-fed440fdf2d6","Type":"ContainerStarted","Data":"ce6f29bf62d487c2bd5400dd0dddffacd63996663aff0adb56bc3c59a81054be"} Dec 05 17:39:26 crc kubenswrapper[4753]: I1205 17:39:26.065195 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" event={"ID":"a40f9ec8-5379-4355-b524-fed440fdf2d6","Type":"ContainerStarted","Data":"93f0da089ec80b996925df656f1573cd3ea6d1194d14232964f417eec2d98f50"} Dec 05 17:39:26 crc kubenswrapper[4753]: I1205 17:39:26.089495 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" podStartSLOduration=1.688248873 podStartE2EDuration="2.089473354s" podCreationTimestamp="2025-12-05 17:39:24 +0000 UTC" firstStartedPulling="2025-12-05 17:39:25.239502639 +0000 UTC m=+2103.742609645" lastFinishedPulling="2025-12-05 17:39:25.64072711 +0000 UTC m=+2104.143834126" observedRunningTime="2025-12-05 17:39:26.081608482 +0000 UTC m=+2104.584715488" watchObservedRunningTime="2025-12-05 17:39:26.089473354 +0000 UTC m=+2104.592580360" Dec 05 17:39:28 crc kubenswrapper[4753]: I1205 17:39:28.389900 4753 scope.go:117] "RemoveContainer" containerID="446a6d48feef1dc049a50a1571760f8e2277f5954d496d300c7e59eeaf6e4a96" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.309887 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zf6qh"] Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.313511 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.325917 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zf6qh"] Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.417322 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-catalog-content\") pod \"community-operators-zf6qh\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") " pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.417491 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br8lp\" (UniqueName: \"kubernetes.io/projected/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-kube-api-access-br8lp\") pod \"community-operators-zf6qh\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") " pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.417531 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-utilities\") pod \"community-operators-zf6qh\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") " pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.519943 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br8lp\" (UniqueName: \"kubernetes.io/projected/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-kube-api-access-br8lp\") pod \"community-operators-zf6qh\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") " pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.520010 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-utilities\") pod \"community-operators-zf6qh\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") " pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.520071 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-catalog-content\") pod \"community-operators-zf6qh\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") " pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.520650 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-catalog-content\") pod \"community-operators-zf6qh\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") " pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.520654 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-utilities\") pod \"community-operators-zf6qh\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") " pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.540477 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-br8lp\" (UniqueName: \"kubernetes.io/projected/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-kube-api-access-br8lp\") pod \"community-operators-zf6qh\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") " pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:51 crc kubenswrapper[4753]: I1205 17:39:51.636792 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:39:52 crc kubenswrapper[4753]: I1205 17:39:52.129164 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zf6qh"] Dec 05 17:39:52 crc kubenswrapper[4753]: I1205 17:39:52.370251 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6qh" event={"ID":"7dfe557a-2aca-474b-b7f1-d73ce9cbec19","Type":"ContainerStarted","Data":"4bbabcf336956d6e1b912c70b757afc383415565b5711fb3c250d9bf9a673a74"} Dec 05 17:39:53 crc kubenswrapper[4753]: I1205 17:39:53.382028 4753 generic.go:334] "Generic (PLEG): container finished" podID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerID="7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87" exitCode=0 Dec 05 17:39:53 crc kubenswrapper[4753]: I1205 17:39:53.382288 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6qh" event={"ID":"7dfe557a-2aca-474b-b7f1-d73ce9cbec19","Type":"ContainerDied","Data":"7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87"} Dec 05 17:39:54 crc kubenswrapper[4753]: I1205 17:39:54.395030 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6qh" event={"ID":"7dfe557a-2aca-474b-b7f1-d73ce9cbec19","Type":"ContainerStarted","Data":"119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0"} Dec 05 17:39:55 crc kubenswrapper[4753]: I1205 17:39:55.412556 4753 generic.go:334] "Generic (PLEG): container finished" podID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerID="119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0" exitCode=0 Dec 05 17:39:55 crc kubenswrapper[4753]: I1205 17:39:55.412671 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6qh" event={"ID":"7dfe557a-2aca-474b-b7f1-d73ce9cbec19","Type":"ContainerDied","Data":"119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0"} Dec 05 17:39:56 crc kubenswrapper[4753]: I1205 17:39:56.430974 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6qh" event={"ID":"7dfe557a-2aca-474b-b7f1-d73ce9cbec19","Type":"ContainerStarted","Data":"1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd"} Dec 05 17:39:56 crc kubenswrapper[4753]: I1205 17:39:56.456562 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zf6qh" podStartSLOduration=2.939503287 podStartE2EDuration="5.456542862s" podCreationTimestamp="2025-12-05 17:39:51 +0000 UTC" firstStartedPulling="2025-12-05 17:39:53.384613363 +0000 UTC m=+2131.887720369" lastFinishedPulling="2025-12-05 17:39:55.901652918 +0000 UTC m=+2134.404759944" observedRunningTime="2025-12-05 17:39:56.451046386 +0000 UTC m=+2134.954153402" watchObservedRunningTime="2025-12-05 17:39:56.456542862 +0000 UTC m=+2134.959649878" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.699662 4753 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-4b8xm"] Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.702636 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.710070 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4b8xm"] Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.875285 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-catalog-content\") pod \"redhat-operators-4b8xm\" (UID: \"4f9e2c84-73bf-4532-b38b-e58c3e6ca707\") " pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.875417 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xd6hb\" (UniqueName: \"kubernetes.io/projected/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-kube-api-access-xd6hb\") pod \"redhat-operators-4b8xm\" (UID: \"4f9e2c84-73bf-4532-b38b-e58c3e6ca707\") " pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.875617 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-utilities\") pod \"redhat-operators-4b8xm\" (UID: \"4f9e2c84-73bf-4532-b38b-e58c3e6ca707\") " pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.978121 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-utilities\") pod \"redhat-operators-4b8xm\" (UID: \"4f9e2c84-73bf-4532-b38b-e58c3e6ca707\") " pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.978332 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-catalog-content\") pod \"redhat-operators-4b8xm\" (UID: \"4f9e2c84-73bf-4532-b38b-e58c3e6ca707\") " pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.978387 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xd6hb\" (UniqueName: \"kubernetes.io/projected/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-kube-api-access-xd6hb\") pod \"redhat-operators-4b8xm\" (UID: \"4f9e2c84-73bf-4532-b38b-e58c3e6ca707\") " pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.978573 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.978633 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection 
refused" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.978675 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-utilities\") pod \"redhat-operators-4b8xm\" (UID: \"4f9e2c84-73bf-4532-b38b-e58c3e6ca707\") " pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:58 crc kubenswrapper[4753]: I1205 17:39:58.978795 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-catalog-content\") pod \"redhat-operators-4b8xm\" (UID: \"4f9e2c84-73bf-4532-b38b-e58c3e6ca707\") " pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:59 crc kubenswrapper[4753]: I1205 17:39:59.005465 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xd6hb\" (UniqueName: \"kubernetes.io/projected/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-kube-api-access-xd6hb\") pod \"redhat-operators-4b8xm\" (UID: \"4f9e2c84-73bf-4532-b38b-e58c3e6ca707\") " pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:59 crc kubenswrapper[4753]: I1205 17:39:59.039215 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:39:59 crc kubenswrapper[4753]: W1205 17:39:59.523383 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice/crio-12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f WatchSource:0}: Error finding container 12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f: Status 404 returned error can't find the container with id 12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f Dec 05 17:39:59 crc kubenswrapper[4753]: I1205 17:39:59.541248 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4b8xm"] Dec 05 17:40:00 crc kubenswrapper[4753]: I1205 17:40:00.481694 4753 generic.go:334] "Generic (PLEG): container finished" podID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerID="8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5" exitCode=0 Dec 05 17:40:00 crc kubenswrapper[4753]: I1205 17:40:00.481758 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8xm" event={"ID":"4f9e2c84-73bf-4532-b38b-e58c3e6ca707","Type":"ContainerDied","Data":"8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5"} Dec 05 17:40:00 crc kubenswrapper[4753]: I1205 17:40:00.482090 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8xm" event={"ID":"4f9e2c84-73bf-4532-b38b-e58c3e6ca707","Type":"ContainerStarted","Data":"12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f"} Dec 05 17:40:01 crc kubenswrapper[4753]: I1205 17:40:01.495329 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8xm" event={"ID":"4f9e2c84-73bf-4532-b38b-e58c3e6ca707","Type":"ContainerStarted","Data":"49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de"} Dec 05 17:40:01 crc kubenswrapper[4753]: I1205 17:40:01.637852 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:40:01 crc kubenswrapper[4753]: I1205 17:40:01.637967 4753 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:40:01 crc kubenswrapper[4753]: I1205 17:40:01.695855 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:40:02 crc kubenswrapper[4753]: I1205 17:40:02.583239 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zf6qh" Dec 05 17:40:03 crc kubenswrapper[4753]: I1205 17:40:03.891619 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zf6qh"] Dec 05 17:40:04 crc kubenswrapper[4753]: I1205 17:40:04.527257 4753 generic.go:334] "Generic (PLEG): container finished" podID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerID="49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de" exitCode=0 Dec 05 17:40:04 crc kubenswrapper[4753]: I1205 17:40:04.527326 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8xm" event={"ID":"4f9e2c84-73bf-4532-b38b-e58c3e6ca707","Type":"ContainerDied","Data":"49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de"} Dec 05 17:40:05 crc kubenswrapper[4753]: I1205 17:40:05.541371 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zf6qh" podUID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerName="registry-server" containerID="cri-o://1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd" gracePeriod=2 Dec 05 17:40:05 crc kubenswrapper[4753]: I1205 17:40:05.541932 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8xm" event={"ID":"4f9e2c84-73bf-4532-b38b-e58c3e6ca707","Type":"ContainerStarted","Data":"661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad"} Dec 05 17:40:05 crc kubenswrapper[4753]: I1205 17:40:05.573423 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4b8xm" podStartSLOduration=3.099205807 podStartE2EDuration="7.573403431s" podCreationTimestamp="2025-12-05 17:39:58 +0000 UTC" firstStartedPulling="2025-12-05 17:40:00.484287056 +0000 UTC m=+2138.987394062" lastFinishedPulling="2025-12-05 17:40:04.95848468 +0000 UTC m=+2143.461591686" observedRunningTime="2025-12-05 17:40:05.563575943 +0000 UTC m=+2144.066682949" watchObservedRunningTime="2025-12-05 17:40:05.573403431 +0000 UTC m=+2144.076510437" Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.190884 4753 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.190884 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zf6qh"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.231993 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-catalog-content\") pod \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") "
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.232100 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br8lp\" (UniqueName: \"kubernetes.io/projected/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-kube-api-access-br8lp\") pod \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") "
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.232237 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-utilities\") pod \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\" (UID: \"7dfe557a-2aca-474b-b7f1-d73ce9cbec19\") "
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.233248 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-utilities" (OuterVolumeSpecName: "utilities") pod "7dfe557a-2aca-474b-b7f1-d73ce9cbec19" (UID: "7dfe557a-2aca-474b-b7f1-d73ce9cbec19"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.242038 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-kube-api-access-br8lp" (OuterVolumeSpecName: "kube-api-access-br8lp") pod "7dfe557a-2aca-474b-b7f1-d73ce9cbec19" (UID: "7dfe557a-2aca-474b-b7f1-d73ce9cbec19"). InnerVolumeSpecName "kube-api-access-br8lp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.283878 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7dfe557a-2aca-474b-b7f1-d73ce9cbec19" (UID: "7dfe557a-2aca-474b-b7f1-d73ce9cbec19"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.335100 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.335161 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.335177 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br8lp\" (UniqueName: \"kubernetes.io/projected/7dfe557a-2aca-474b-b7f1-d73ce9cbec19-kube-api-access-br8lp\") on node \"crc\" DevicePath \"\""
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.553108 4753 generic.go:334] "Generic (PLEG): container finished" podID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerID="1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd" exitCode=0
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.553344 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zf6qh"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.553367 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6qh" event={"ID":"7dfe557a-2aca-474b-b7f1-d73ce9cbec19","Type":"ContainerDied","Data":"1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd"}
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.554313 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6qh" event={"ID":"7dfe557a-2aca-474b-b7f1-d73ce9cbec19","Type":"ContainerDied","Data":"4bbabcf336956d6e1b912c70b757afc383415565b5711fb3c250d9bf9a673a74"}
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.554356 4753 scope.go:117] "RemoveContainer" containerID="1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.584809 4753 scope.go:117] "RemoveContainer" containerID="119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.625240 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zf6qh"]
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.632265 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zf6qh"]
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.656968 4753 scope.go:117] "RemoveContainer" containerID="7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.688066 4753 scope.go:117] "RemoveContainer" containerID="1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd"
Dec 05 17:40:06 crc kubenswrapper[4753]: E1205 17:40:06.688587 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd\": container with ID starting with 1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd not found: ID does not exist" containerID="1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.688620 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd"} err="failed to get container status \"1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd\": rpc error: code = NotFound desc = could not find container \"1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd\": container with ID starting with 1a1ad978ad08439267a90fa2d26543c9bebed6a37ec1db65b71e5030b414bdcd not found: ID does not exist"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.688643 4753 scope.go:117] "RemoveContainer" containerID="119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0"
Dec 05 17:40:06 crc kubenswrapper[4753]: E1205 17:40:06.689388 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0\": container with ID starting with 119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0 not found: ID does not exist" containerID="119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.689437 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0"} err="failed to get container status \"119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0\": rpc error: code = NotFound desc = could not find container \"119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0\": container with ID starting with 119ca2df1dd88034a709d339c1b3311ff9f9218880a4e09cfca5fc2ebee4ace0 not found: ID does not exist"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.689475 4753 scope.go:117] "RemoveContainer" containerID="7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87"
Dec 05 17:40:06 crc kubenswrapper[4753]: E1205 17:40:06.689769 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87\": container with ID starting with 7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87 not found: ID does not exist" containerID="7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87"
Dec 05 17:40:06 crc kubenswrapper[4753]: I1205 17:40:06.689805 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87"} err="failed to get container status \"7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87\": rpc error: code = NotFound desc = could not find container \"7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87\": container with ID starting with 7506ecbcc56316620ef5ac508c4033325ab19c638553b04f82654fc8e239fa87 not found: ID does not exist"
Dec 05 17:40:07 crc kubenswrapper[4753]: I1205 17:40:07.736260 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" path="/var/lib/kubelet/pods/7dfe557a-2aca-474b-b7f1-d73ce9cbec19/volumes"
Dec 05 17:40:09 crc kubenswrapper[4753]: I1205 17:40:09.039588 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4b8xm"
Dec 05 17:40:09 crc kubenswrapper[4753]: I1205 17:40:09.039654 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4b8xm"
Dec 05 17:40:10 crc kubenswrapper[4753]: I1205 17:40:10.090736 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4b8xm" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerName="registry-server" probeResult="failure" output=<
Dec 05 17:40:10 crc kubenswrapper[4753]: timeout: failed to connect service ":50051" within 1s
Dec 05 17:40:10 crc kubenswrapper[4753]: >
Dec 05 17:40:19 crc kubenswrapper[4753]: I1205 17:40:19.096473 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4b8xm"
Dec 05 17:40:19 crc kubenswrapper[4753]: I1205 17:40:19.162776 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4b8xm"
Dec 05 17:40:19 crc kubenswrapper[4753]: I1205 17:40:19.339673 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4b8xm"]
Dec 05 17:40:20 crc kubenswrapper[4753]: I1205 17:40:20.730596 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4b8xm" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerName="registry-server" containerID="cri-o://661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad" gracePeriod=2
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.580949 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.580989 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xd6hb\" (UniqueName: \"kubernetes.io/projected/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-kube-api-access-xd6hb\") on node \"crc\" DevicePath \"\"" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.620264 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4f9e2c84-73bf-4532-b38b-e58c3e6ca707" (UID: "4f9e2c84-73bf-4532-b38b-e58c3e6ca707"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.682877 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4f9e2c84-73bf-4532-b38b-e58c3e6ca707-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.769644 4753 generic.go:334] "Generic (PLEG): container finished" podID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerID="661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad" exitCode=0 Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.769708 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8xm" event={"ID":"4f9e2c84-73bf-4532-b38b-e58c3e6ca707","Type":"ContainerDied","Data":"661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad"} Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.769738 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4b8xm" event={"ID":"4f9e2c84-73bf-4532-b38b-e58c3e6ca707","Type":"ContainerDied","Data":"12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f"} Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.769772 4753 scope.go:117] "RemoveContainer" containerID="661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.769802 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4b8xm" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.807587 4753 scope.go:117] "RemoveContainer" containerID="49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.809317 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4b8xm"] Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.837392 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4b8xm"] Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.838321 4753 scope.go:117] "RemoveContainer" containerID="8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.880554 4753 scope.go:117] "RemoveContainer" containerID="661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad" Dec 05 17:40:21 crc kubenswrapper[4753]: E1205 17:40:21.881142 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad\": container with ID starting with 661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad not found: ID does not exist" containerID="661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.881192 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad"} err="failed to get container status \"661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad\": rpc error: code = NotFound desc = could not find container \"661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad\": container with ID starting with 661f661a38de1bbc416c22e3a5841b053bbb399239354bbba9480d6b367201ad not found: ID does not exist" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.881221 4753 scope.go:117] "RemoveContainer" containerID="49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de" Dec 05 17:40:21 crc kubenswrapper[4753]: E1205 17:40:21.881681 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de\": container with ID starting with 49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de not found: ID does not exist" containerID="49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.881712 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de"} err="failed to get container status \"49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de\": rpc error: code = NotFound desc = could not find container \"49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de\": container with ID starting with 49cfc17d23df036733d03eca774f67f6f855d84c35361fec947b4d599fcf38de not found: ID does not exist" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.881733 4753 scope.go:117] "RemoveContainer" containerID="8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5" Dec 05 17:40:21 crc kubenswrapper[4753]: E1205 17:40:21.882171 4753 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5\": container with ID starting with 8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5 not found: ID does not exist" containerID="8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5" Dec 05 17:40:21 crc kubenswrapper[4753]: I1205 17:40:21.882280 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5"} err="failed to get container status \"8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5\": rpc error: code = NotFound desc = could not find container \"8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5\": container with ID starting with 8cde9ff455f3d286ea9af9d059ea9e1604877a0a59a4e1d2fda6b9e55c7b05f5 not found: ID does not exist" Dec 05 17:40:23 crc kubenswrapper[4753]: I1205 17:40:23.747352 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" path="/var/lib/kubelet/pods/4f9e2c84-73bf-4532-b38b-e58c3e6ca707/volumes" Dec 05 17:40:24 crc kubenswrapper[4753]: E1205 17:40:24.105744 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice/crio-12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice\": RecentStats: unable to find data in memory cache]" Dec 05 17:40:28 crc kubenswrapper[4753]: I1205 17:40:28.519874 4753 scope.go:117] "RemoveContainer" containerID="418ce809484f34822deed56f5718ae8c28ae220b7a1026936f73ce1f6350bb9c" Dec 05 17:40:28 crc kubenswrapper[4753]: I1205 17:40:28.978907 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:40:28 crc kubenswrapper[4753]: I1205 17:40:28.978986 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:40:34 crc kubenswrapper[4753]: E1205 17:40:34.358231 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice/crio-12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice\": RecentStats: unable to find data in memory cache]" Dec 05 17:40:44 crc kubenswrapper[4753]: E1205 17:40:44.676806 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice\": RecentStats: unable 
Dec 05 17:40:23 crc kubenswrapper[4753]: I1205 17:40:23.747352 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" path="/var/lib/kubelet/pods/4f9e2c84-73bf-4532-b38b-e58c3e6ca707/volumes"
Dec 05 17:40:24 crc kubenswrapper[4753]: E1205 17:40:24.105744 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice/crio-12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 17:40:28 crc kubenswrapper[4753]: I1205 17:40:28.519874 4753 scope.go:117] "RemoveContainer" containerID="418ce809484f34822deed56f5718ae8c28ae220b7a1026936f73ce1f6350bb9c"
Dec 05 17:40:28 crc kubenswrapper[4753]: I1205 17:40:28.978907 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:40:28 crc kubenswrapper[4753]: I1205 17:40:28.978986 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:40:34 crc kubenswrapper[4753]: E1205 17:40:34.358231 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice/crio-12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 17:40:44 crc kubenswrapper[4753]: E1205 17:40:44.676806 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice/crio-12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f\": RecentStats: unable to find data in memory cache]"
Dec 05 17:40:54 crc kubenswrapper[4753]: E1205 17:40:54.963200 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice/crio-12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f\": RecentStats: unable to find data in memory cache]"
Dec 05 17:40:58 crc kubenswrapper[4753]: I1205 17:40:58.979020 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:40:58 crc kubenswrapper[4753]: I1205 17:40:58.979783 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:40:58 crc kubenswrapper[4753]: I1205 17:40:58.979844 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68"
Dec 05 17:40:58 crc kubenswrapper[4753]: I1205 17:40:58.980627 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"991d8322b4cfd6073d1f5ac6852a07230df5501090d0dc07c17c7fbcde13dc9e"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 17:40:58 crc kubenswrapper[4753]: I1205 17:40:58.980696 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://991d8322b4cfd6073d1f5ac6852a07230df5501090d0dc07c17c7fbcde13dc9e" gracePeriod=600
Dec 05 17:40:59 crc kubenswrapper[4753]: I1205 17:40:59.297491 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="991d8322b4cfd6073d1f5ac6852a07230df5501090d0dc07c17c7fbcde13dc9e" exitCode=0
Dec 05 17:40:59 crc kubenswrapper[4753]: I1205 17:40:59.297526 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"991d8322b4cfd6073d1f5ac6852a07230df5501090d0dc07c17c7fbcde13dc9e"}
Dec 05 17:40:59 crc kubenswrapper[4753]: I1205 17:40:59.297785 4753 scope.go:117] "RemoveContainer" containerID="87682a74661e693e498cd793cc20d16fc9f4a3b8a1a6b54f10285e2dcd15eafd"
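
machine-config-daemon's liveness probe is an HTTP GET against http://127.0.0.1:8798/health; after the repeated connection-refused failures above, the kubelet marks the probe unhealthy, kills the container with gracePeriod=600, and restarts it (the 17:40:58-17:41:00 sequence). A minimal sketch of such an HTTP check, assuming a one-second budget and the usual rule that only 2xx/3xx responses count as healthy:

package main

import (
    "fmt"
    "net/http"
    "time"
)

// httpProbe mirrors an HTTP liveness check: any transport error (including
// "connect: connection refused", as in the log) or a >=400 status is a
// probe failure.
func httpProbe(url string, timeout time.Duration) error {
    client := &http.Client{Timeout: timeout}
    resp, err := client.Get(url)
    if err != nil {
        return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
    }
    defer resp.Body.Close()
    if resp.StatusCode >= 400 {
        return fmt.Errorf("unhealthy: HTTP %d", resp.StatusCode)
    }
    return nil
}

func main() {
    fmt.Println(httpProbe("http://127.0.0.1:8798/health", time.Second))
}
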
pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"} Dec 05 17:41:05 crc kubenswrapper[4753]: E1205 17:41:05.307361 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice/crio-12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f\": RecentStats: unable to find data in memory cache]" Dec 05 17:41:15 crc kubenswrapper[4753]: E1205 17:41:15.626138 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f9e2c84_73bf_4532_b38b_e58c3e6ca707.slice/crio-12b1d7d9f62f885ec5bd335e86a303e7d622c694bbfad0a6bc1ca407fc92047f\": RecentStats: unable to find data in memory cache]" Dec 05 17:41:28 crc kubenswrapper[4753]: I1205 17:41:28.632978 4753 scope.go:117] "RemoveContainer" containerID="56de0dcec4169c18c85433352029914ff2efd1c1bb8af2c7a6d4a87f80f63ae2" Dec 05 17:41:28 crc kubenswrapper[4753]: I1205 17:41:28.683998 4753 scope.go:117] "RemoveContainer" containerID="c5f58392ec576df89b4f9cd989a1fb83d6ddbae0091ba5b8bc4fbf6c2b5a7364" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.525071 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-spzzl"] Dec 05 17:41:29 crc kubenswrapper[4753]: E1205 17:41:29.525510 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerName="registry-server" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.525529 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerName="registry-server" Dec 05 17:41:29 crc kubenswrapper[4753]: E1205 17:41:29.525542 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerName="extract-utilities" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.525549 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerName="extract-utilities" Dec 05 17:41:29 crc kubenswrapper[4753]: E1205 17:41:29.525565 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerName="extract-content" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.525572 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerName="extract-content" Dec 05 17:41:29 crc kubenswrapper[4753]: E1205 17:41:29.525583 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerName="extract-content" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.525590 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerName="extract-content" Dec 05 17:41:29 crc kubenswrapper[4753]: E1205 17:41:29.525602 4753 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerName="registry-server" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.525607 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerName="registry-server" Dec 05 17:41:29 crc kubenswrapper[4753]: E1205 17:41:29.525628 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerName="extract-utilities" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.525634 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerName="extract-utilities" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.525864 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dfe557a-2aca-474b-b7f1-d73ce9cbec19" containerName="registry-server" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.525878 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f9e2c84-73bf-4532-b38b-e58c3e6ca707" containerName="registry-server" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.527421 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.553790 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-spzzl"] Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.629357 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dqrrk\" (UniqueName: \"kubernetes.io/projected/3d846119-9bc3-4d02-b5ce-96bac2685077-kube-api-access-dqrrk\") pod \"certified-operators-spzzl\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.629694 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-utilities\") pod \"certified-operators-spzzl\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.629757 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-catalog-content\") pod \"certified-operators-spzzl\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.731781 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-utilities\") pod \"certified-operators-spzzl\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.731862 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-catalog-content\") pod \"certified-operators-spzzl\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc 
kubenswrapper[4753]: I1205 17:41:29.731954 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dqrrk\" (UniqueName: \"kubernetes.io/projected/3d846119-9bc3-4d02-b5ce-96bac2685077-kube-api-access-dqrrk\") pod \"certified-operators-spzzl\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.732920 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-utilities\") pod \"certified-operators-spzzl\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.733259 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-catalog-content\") pod \"certified-operators-spzzl\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.754849 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dqrrk\" (UniqueName: \"kubernetes.io/projected/3d846119-9bc3-4d02-b5ce-96bac2685077-kube-api-access-dqrrk\") pod \"certified-operators-spzzl\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:29 crc kubenswrapper[4753]: I1205 17:41:29.860671 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:30 crc kubenswrapper[4753]: I1205 17:41:30.387073 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-spzzl"] Dec 05 17:41:30 crc kubenswrapper[4753]: I1205 17:41:30.710353 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spzzl" event={"ID":"3d846119-9bc3-4d02-b5ce-96bac2685077","Type":"ContainerStarted","Data":"87a3538fea88b1d01c32e810361a3cc988fc1d7702ec5e8ad7523a5f2bf10d0b"} Dec 05 17:41:31 crc kubenswrapper[4753]: I1205 17:41:31.727815 4753 generic.go:334] "Generic (PLEG): container finished" podID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerID="00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f" exitCode=0 Dec 05 17:41:31 crc kubenswrapper[4753]: I1205 17:41:31.737942 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spzzl" event={"ID":"3d846119-9bc3-4d02-b5ce-96bac2685077","Type":"ContainerDied","Data":"00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f"} Dec 05 17:41:32 crc kubenswrapper[4753]: I1205 17:41:32.741494 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spzzl" event={"ID":"3d846119-9bc3-4d02-b5ce-96bac2685077","Type":"ContainerStarted","Data":"fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26"} Dec 05 17:41:33 crc kubenswrapper[4753]: I1205 17:41:33.754047 4753 generic.go:334] "Generic (PLEG): container finished" podID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerID="fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26" exitCode=0 Dec 05 17:41:33 crc kubenswrapper[4753]: I1205 17:41:33.754289 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-spzzl" event={"ID":"3d846119-9bc3-4d02-b5ce-96bac2685077","Type":"ContainerDied","Data":"fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26"} Dec 05 17:41:34 crc kubenswrapper[4753]: I1205 17:41:34.805088 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spzzl" event={"ID":"3d846119-9bc3-4d02-b5ce-96bac2685077","Type":"ContainerStarted","Data":"86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256"} Dec 05 17:41:39 crc kubenswrapper[4753]: I1205 17:41:39.861431 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:39 crc kubenswrapper[4753]: I1205 17:41:39.861996 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:39 crc kubenswrapper[4753]: I1205 17:41:39.923642 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:39 crc kubenswrapper[4753]: I1205 17:41:39.940060 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-spzzl" podStartSLOduration=8.518401209 podStartE2EDuration="10.940036379s" podCreationTimestamp="2025-12-05 17:41:29 +0000 UTC" firstStartedPulling="2025-12-05 17:41:31.740583912 +0000 UTC m=+2230.243690918" lastFinishedPulling="2025-12-05 17:41:34.162219072 +0000 UTC m=+2232.665326088" observedRunningTime="2025-12-05 17:41:34.827985735 +0000 UTC m=+2233.331092741" watchObservedRunningTime="2025-12-05 17:41:39.940036379 +0000 UTC m=+2238.443143425" Dec 05 17:41:40 crc kubenswrapper[4753]: I1205 17:41:40.940325 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:41 crc kubenswrapper[4753]: I1205 17:41:41.002627 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-spzzl"] Dec 05 17:41:42 crc kubenswrapper[4753]: I1205 17:41:42.896973 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-spzzl" podUID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerName="registry-server" containerID="cri-o://86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256" gracePeriod=2 Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.478575 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.534873 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-catalog-content\") pod \"3d846119-9bc3-4d02-b5ce-96bac2685077\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.534997 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dqrrk\" (UniqueName: \"kubernetes.io/projected/3d846119-9bc3-4d02-b5ce-96bac2685077-kube-api-access-dqrrk\") pod \"3d846119-9bc3-4d02-b5ce-96bac2685077\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.542914 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d846119-9bc3-4d02-b5ce-96bac2685077-kube-api-access-dqrrk" (OuterVolumeSpecName: "kube-api-access-dqrrk") pod "3d846119-9bc3-4d02-b5ce-96bac2685077" (UID: "3d846119-9bc3-4d02-b5ce-96bac2685077"). InnerVolumeSpecName "kube-api-access-dqrrk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.637811 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-utilities\") pod \"3d846119-9bc3-4d02-b5ce-96bac2685077\" (UID: \"3d846119-9bc3-4d02-b5ce-96bac2685077\") " Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.638761 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dqrrk\" (UniqueName: \"kubernetes.io/projected/3d846119-9bc3-4d02-b5ce-96bac2685077-kube-api-access-dqrrk\") on node \"crc\" DevicePath \"\"" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.638908 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-utilities" (OuterVolumeSpecName: "utilities") pod "3d846119-9bc3-4d02-b5ce-96bac2685077" (UID: "3d846119-9bc3-4d02-b5ce-96bac2685077"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.696820 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3d846119-9bc3-4d02-b5ce-96bac2685077" (UID: "3d846119-9bc3-4d02-b5ce-96bac2685077"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.741597 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.741661 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3d846119-9bc3-4d02-b5ce-96bac2685077-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.945897 4753 generic.go:334] "Generic (PLEG): container finished" podID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerID="86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256" exitCode=0 Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.945944 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spzzl" event={"ID":"3d846119-9bc3-4d02-b5ce-96bac2685077","Type":"ContainerDied","Data":"86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256"} Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.945977 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-spzzl" event={"ID":"3d846119-9bc3-4d02-b5ce-96bac2685077","Type":"ContainerDied","Data":"87a3538fea88b1d01c32e810361a3cc988fc1d7702ec5e8ad7523a5f2bf10d0b"} Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.945996 4753 scope.go:117] "RemoveContainer" containerID="86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.946994 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-spzzl" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.975430 4753 scope.go:117] "RemoveContainer" containerID="fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26" Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.979597 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-spzzl"] Dec 05 17:41:43 crc kubenswrapper[4753]: I1205 17:41:43.993644 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-spzzl"] Dec 05 17:41:44 crc kubenswrapper[4753]: I1205 17:41:44.002905 4753 scope.go:117] "RemoveContainer" containerID="00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f" Dec 05 17:41:44 crc kubenswrapper[4753]: I1205 17:41:44.061754 4753 scope.go:117] "RemoveContainer" containerID="86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256" Dec 05 17:41:44 crc kubenswrapper[4753]: E1205 17:41:44.062353 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256\": container with ID starting with 86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256 not found: ID does not exist" containerID="86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256" Dec 05 17:41:44 crc kubenswrapper[4753]: I1205 17:41:44.062412 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256"} err="failed to get container status \"86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256\": rpc error: code = NotFound desc = could not find container \"86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256\": container with ID starting with 86fd8c76c3a26cd90b2a6b27d4e79965682b07c5b763013acf8a4b01cb863256 not found: ID does not exist" Dec 05 17:41:44 crc kubenswrapper[4753]: I1205 17:41:44.062452 4753 scope.go:117] "RemoveContainer" containerID="fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26" Dec 05 17:41:44 crc kubenswrapper[4753]: E1205 17:41:44.062886 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26\": container with ID starting with fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26 not found: ID does not exist" containerID="fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26" Dec 05 17:41:44 crc kubenswrapper[4753]: I1205 17:41:44.062980 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26"} err="failed to get container status \"fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26\": rpc error: code = NotFound desc = could not find container \"fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26\": container with ID starting with fcda1066aecc70e5c087860a691a6cde24b9f592afa688ebbc81ab528e7b0b26 not found: ID does not exist" Dec 05 17:41:44 crc kubenswrapper[4753]: I1205 17:41:44.063058 4753 scope.go:117] "RemoveContainer" containerID="00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f" Dec 05 17:41:44 crc kubenswrapper[4753]: E1205 17:41:44.063392 4753 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f\": container with ID starting with 00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f not found: ID does not exist" containerID="00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f" Dec 05 17:41:44 crc kubenswrapper[4753]: I1205 17:41:44.063445 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f"} err="failed to get container status \"00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f\": rpc error: code = NotFound desc = could not find container \"00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f\": container with ID starting with 00c49c9446a70ef9e2803508a348820e756ae553193f8360a245533fb1ca086f not found: ID does not exist" Dec 05 17:41:45 crc kubenswrapper[4753]: I1205 17:41:45.738309 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d846119-9bc3-4d02-b5ce-96bac2685077" path="/var/lib/kubelet/pods/3d846119-9bc3-4d02-b5ce-96bac2685077/volumes" Dec 05 17:42:28 crc kubenswrapper[4753]: I1205 17:42:28.845730 4753 scope.go:117] "RemoveContainer" containerID="cd1a97ef33bbfa59eae87dbb1a186e797c7a2f5c8003981a5a799d327e2dc85a" Dec 05 17:42:28 crc kubenswrapper[4753]: I1205 17:42:28.877488 4753 scope.go:117] "RemoveContainer" containerID="61214eff942b4b978be3d8c47ea8c8d87c4cf7770e784621e5c97bc0d55cdaa8" Dec 05 17:42:28 crc kubenswrapper[4753]: I1205 17:42:28.899890 4753 scope.go:117] "RemoveContainer" containerID="368ddfd5ece7e3ca53b5af287ae2501e74ffc1b7945c30fac3f67b1d3437c325" Dec 05 17:42:28 crc kubenswrapper[4753]: I1205 17:42:28.920328 4753 scope.go:117] "RemoveContainer" containerID="d9d9921fbb8be6eb6483ed171caed7437d62b4b41cc48e60278296fdbcb16d4c" Dec 05 17:42:28 crc kubenswrapper[4753]: I1205 17:42:28.947793 4753 scope.go:117] "RemoveContainer" containerID="15d22364dd7fae564b8cf65f5e227c20e8a31f065fdfb27a20746185026ca508" Dec 05 17:42:28 crc kubenswrapper[4753]: I1205 17:42:28.979414 4753 scope.go:117] "RemoveContainer" containerID="4b9fb2b4f2010e35837ba67636a32951274452bdc063bf803a8d53a1cf51f373" Dec 05 17:42:29 crc kubenswrapper[4753]: I1205 17:42:29.016875 4753 scope.go:117] "RemoveContainer" containerID="66cf734570f26d4816f153692dac7b97d2bb5769dfd7e28d005ee6a7e6c8d96b" Dec 05 17:42:29 crc kubenswrapper[4753]: I1205 17:42:29.043173 4753 scope.go:117] "RemoveContainer" containerID="f8d8c6b51e78b11df564815a8800e8f02479847d968808269980faba71113b94" Dec 05 17:42:29 crc kubenswrapper[4753]: I1205 17:42:29.075500 4753 scope.go:117] "RemoveContainer" containerID="754da66f80e8abfd29131c4f6b9277e7f437795ba065e64706bbf9808cd6e548" Dec 05 17:42:29 crc kubenswrapper[4753]: I1205 17:42:29.136569 4753 scope.go:117] "RemoveContainer" containerID="2c4337dcb536a1e002d2255fe04957fcb4e198f271acd152c67e0171fae87288" Dec 05 17:43:18 crc kubenswrapper[4753]: I1205 17:43:18.114253 4753 generic.go:334] "Generic (PLEG): container finished" podID="a40f9ec8-5379-4355-b524-fed440fdf2d6" containerID="ce6f29bf62d487c2bd5400dd0dddffacd63996663aff0adb56bc3c59a81054be" exitCode=0 Dec 05 17:43:18 crc kubenswrapper[4753]: I1205 17:43:18.114376 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" 
event={"ID":"a40f9ec8-5379-4355-b524-fed440fdf2d6","Type":"ContainerDied","Data":"ce6f29bf62d487c2bd5400dd0dddffacd63996663aff0adb56bc3c59a81054be"} Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.694724 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.809332 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-ssh-key\") pod \"a40f9ec8-5379-4355-b524-fed440fdf2d6\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.809563 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-bootstrap-combined-ca-bundle\") pod \"a40f9ec8-5379-4355-b524-fed440fdf2d6\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.809607 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqmtr\" (UniqueName: \"kubernetes.io/projected/a40f9ec8-5379-4355-b524-fed440fdf2d6-kube-api-access-cqmtr\") pod \"a40f9ec8-5379-4355-b524-fed440fdf2d6\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.809636 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-inventory\") pod \"a40f9ec8-5379-4355-b524-fed440fdf2d6\" (UID: \"a40f9ec8-5379-4355-b524-fed440fdf2d6\") " Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.814427 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a40f9ec8-5379-4355-b524-fed440fdf2d6-kube-api-access-cqmtr" (OuterVolumeSpecName: "kube-api-access-cqmtr") pod "a40f9ec8-5379-4355-b524-fed440fdf2d6" (UID: "a40f9ec8-5379-4355-b524-fed440fdf2d6"). InnerVolumeSpecName "kube-api-access-cqmtr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.815006 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "a40f9ec8-5379-4355-b524-fed440fdf2d6" (UID: "a40f9ec8-5379-4355-b524-fed440fdf2d6"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.841575 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a40f9ec8-5379-4355-b524-fed440fdf2d6" (UID: "a40f9ec8-5379-4355-b524-fed440fdf2d6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.857400 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-inventory" (OuterVolumeSpecName: "inventory") pod "a40f9ec8-5379-4355-b524-fed440fdf2d6" (UID: "a40f9ec8-5379-4355-b524-fed440fdf2d6"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.912462 4753 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.912502 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqmtr\" (UniqueName: \"kubernetes.io/projected/a40f9ec8-5379-4355-b524-fed440fdf2d6-kube-api-access-cqmtr\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.912514 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:19 crc kubenswrapper[4753]: I1205 17:43:19.912527 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a40f9ec8-5379-4355-b524-fed440fdf2d6-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.149527 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" event={"ID":"a40f9ec8-5379-4355-b524-fed440fdf2d6","Type":"ContainerDied","Data":"93f0da089ec80b996925df656f1573cd3ea6d1194d14232964f417eec2d98f50"} Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.149570 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93f0da089ec80b996925df656f1573cd3ea6d1194d14232964f417eec2d98f50" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.149676 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.262067 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7"] Dec 05 17:43:20 crc kubenswrapper[4753]: E1205 17:43:20.262533 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a40f9ec8-5379-4355-b524-fed440fdf2d6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.262556 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="a40f9ec8-5379-4355-b524-fed440fdf2d6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 17:43:20 crc kubenswrapper[4753]: E1205 17:43:20.262580 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerName="extract-content" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.262590 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerName="extract-content" Dec 05 17:43:20 crc kubenswrapper[4753]: E1205 17:43:20.262609 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerName="registry-server" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.262615 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerName="registry-server" Dec 05 17:43:20 crc kubenswrapper[4753]: E1205 17:43:20.262644 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerName="extract-utilities" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.262654 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerName="extract-utilities" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.262881 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d846119-9bc3-4d02-b5ce-96bac2685077" containerName="registry-server" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.262907 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="a40f9ec8-5379-4355-b524-fed440fdf2d6" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.263799 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.265679 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.266166 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.266332 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.266661 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.273707 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7"] Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.320881 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2drf7\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.320959 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4698\" (UniqueName: \"kubernetes.io/projected/825da353-e856-45ac-9cff-027d1f16663a-kube-api-access-c4698\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2drf7\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.321465 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2drf7\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.424069 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2drf7\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.424505 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2drf7\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.424574 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4698\" (UniqueName: \"kubernetes.io/projected/825da353-e856-45ac-9cff-027d1f16663a-kube-api-access-c4698\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-2drf7\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.429377 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2drf7\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.435230 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2drf7\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.450979 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4698\" (UniqueName: \"kubernetes.io/projected/825da353-e856-45ac-9cff-027d1f16663a-kube-api-access-c4698\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-2drf7\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:20 crc kubenswrapper[4753]: I1205 17:43:20.580508 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" Dec 05 17:43:21 crc kubenswrapper[4753]: I1205 17:43:21.063072 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-d829-account-create-update-87s9q"] Dec 05 17:43:21 crc kubenswrapper[4753]: I1205 17:43:21.076592 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-d829-account-create-update-87s9q"] Dec 05 17:43:21 crc kubenswrapper[4753]: I1205 17:43:21.189883 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7"] Dec 05 17:43:21 crc kubenswrapper[4753]: I1205 17:43:21.193755 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:43:21 crc kubenswrapper[4753]: I1205 17:43:21.739046 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ceaaa26-c606-4aba-95c4-837d79fac0b8" path="/var/lib/kubelet/pods/7ceaaa26-c606-4aba-95c4-837d79fac0b8/volumes" Dec 05 17:43:21 crc kubenswrapper[4753]: I1205 17:43:21.776985 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.039565 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-1e4c-account-create-update-5s55z"] Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.048287 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-k7h4r"] Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.057477 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-1e4c-account-create-update-5s55z"] Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.066312 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-k7h4r"] Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.085610 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-96mhr"]
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.097451 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-affb-account-create-update-m555z"]
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.105517 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-96mhr"]
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.115356 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-443d-account-create-update-fzttf"]
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.125293 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-affb-account-create-update-m555z"]
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.149048 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-create-ks6km"]
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.156692 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-58dh4"]
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.172679 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" event={"ID":"825da353-e856-45ac-9cff-027d1f16663a","Type":"ContainerStarted","Data":"1d3961afdbb04a0cf4d9477547d8bdbb5cd4e9f122cdb8db947ce595fb145ed8"}
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.172750 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" event={"ID":"825da353-e856-45ac-9cff-027d1f16663a","Type":"ContainerStarted","Data":"30bfb03bc71d413eda86252adbada3c9aa4791f79b7228236cca12c05ea23854"}
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.173958 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-58dh4"]
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.186896 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-create-ks6km"]
Dec 05 17:43:22 crc kubenswrapper[4753]: I1205 17:43:22.199459 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" podStartSLOduration=1.61912864 podStartE2EDuration="2.199438818s" podCreationTimestamp="2025-12-05 17:43:20 +0000 UTC" firstStartedPulling="2025-12-05 17:43:21.193498719 +0000 UTC m=+2339.696605725" lastFinishedPulling="2025-12-05 17:43:21.773808897 +0000 UTC m=+2340.276915903" observedRunningTime="2025-12-05 17:43:22.188778487 +0000 UTC m=+2340.691885493" watchObservedRunningTime="2025-12-05 17:43:22.199438818 +0000 UTC m=+2340.702545824"
Dec 05 17:43:23 crc kubenswrapper[4753]: I1205 17:43:23.735911 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00d86d0d-6896-435e-808f-eec9c8225f99" path="/var/lib/kubelet/pods/00d86d0d-6896-435e-808f-eec9c8225f99/volumes"
Dec 05 17:43:23 crc kubenswrapper[4753]: I1205 17:43:23.738925 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1" path="/var/lib/kubelet/pods/1f6e2320-db10-4bfb-a6ac-f2c24dcd91b1/volumes"
Dec 05 17:43:23 crc kubenswrapper[4753]: I1205 17:43:23.740086 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5af8977e-c995-4b12-a427-c4223f563be6" path="/var/lib/kubelet/pods/5af8977e-c995-4b12-a427-c4223f563be6/volumes"
Dec 05 17:43:23 crc kubenswrapper[4753]: I1205 17:43:23.741234 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="842c7431-4ec7-4dae-98ea-c1d6482295f9" path="/var/lib/kubelet/pods/842c7431-4ec7-4dae-98ea-c1d6482295f9/volumes"
Dec 05 17:43:23 crc kubenswrapper[4753]: I1205 17:43:23.743954 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afe6484a-49e0-4297-b722-3ad1eb80f936" path="/var/lib/kubelet/pods/afe6484a-49e0-4297-b722-3ad1eb80f936/volumes"
Dec 05 17:43:23 crc kubenswrapper[4753]: I1205 17:43:23.745290 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dc2dad3f-2686-4fdc-8686-aeed53073cd2" path="/var/lib/kubelet/pods/dc2dad3f-2686-4fdc-8686-aeed53073cd2/volumes"
Dec 05 17:43:23 crc kubenswrapper[4753]: I1205 17:43:23.746730 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb4d18c1-5c7a-49e1-8662-29758295058b" path="/var/lib/kubelet/pods/eb4d18c1-5c7a-49e1-8662-29758295058b/volumes"
Dec 05 17:43:24 crc kubenswrapper[4753]: I1205 17:43:24.044632 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-a894-account-create-update-rg285"]
Dec 05 17:43:24 crc kubenswrapper[4753]: I1205 17:43:24.059731 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-m2cm2"]
Dec 05 17:43:24 crc kubenswrapper[4753]: I1205 17:43:24.070915 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-a894-account-create-update-rg285"]
Dec 05 17:43:24 crc kubenswrapper[4753]: I1205 17:43:24.080781 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-m2cm2"]
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.031572 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-a206-account-create-update-999f6"]
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.041498 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-pz8kp"]
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.053470 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-a206-account-create-update-999f6"]
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.062519 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-xbccd"]
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.073118 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-xbccd"]
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.085114 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-pz8kp"]
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.739492 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01bbb12f-14b3-4fdb-972e-e33615efe1a3" path="/var/lib/kubelet/pods/01bbb12f-14b3-4fdb-972e-e33615efe1a3/volumes"
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.741684 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ecd357b-ae3c-42f0-a90b-f09e00af942a" path="/var/lib/kubelet/pods/0ecd357b-ae3c-42f0-a90b-f09e00af942a/volumes"
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.743323 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26f56e94-9cee-4adb-a29e-74661b598739" path="/var/lib/kubelet/pods/26f56e94-9cee-4adb-a29e-74661b598739/volumes"
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.745199 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="821491b7-948f-4be8-b01f-90168f29ab11" path="/var/lib/kubelet/pods/821491b7-948f-4be8-b01f-90168f29ab11/volumes"
Dec 05 17:43:25 crc kubenswrapper[4753]: I1205 17:43:25.746556 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e1035b6-a207-4ac0-bee6-2ce590e2101e" path="/var/lib/kubelet/pods/9e1035b6-a207-4ac0-bee6-2ce590e2101e/volumes"
Dec 05 17:43:26 crc kubenswrapper[4753]: I1205 17:43:26.029896 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-313f-account-create-update-kbxsm"]
Dec 05 17:43:26 crc kubenswrapper[4753]: I1205 17:43:26.039124 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-313f-account-create-update-kbxsm"]
Dec 05 17:43:27 crc kubenswrapper[4753]: I1205 17:43:27.735869 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bd95044-6b92-43ac-8bab-05b16590530c" path="/var/lib/kubelet/pods/5bd95044-6b92-43ac-8bab-05b16590530c/volumes"
Dec 05 17:43:28 crc kubenswrapper[4753]: I1205 17:43:28.979603 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:43:28 crc kubenswrapper[4753]: I1205 17:43:28.979844 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.282269 4753 scope.go:117] "RemoveContainer" containerID="a6df7f2bb11ccf0cd801ab25c277addc7ae8dd240ee650b5bded159aae03f091"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.315638 4753 scope.go:117] "RemoveContainer" containerID="35c4e45d1ccf2ebe69ce1e80cffe58987ee87cdee0530a1b9fabd3d9a618bca6"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.374783 4753 scope.go:117] "RemoveContainer" containerID="7fd88d83f0f6c452714e7196aa44e0490ec2d551ca776d088b47c2d228a12ab8"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.425105 4753 scope.go:117] "RemoveContainer" containerID="627652aba9b6ecfe16b97eeb90039e680953cd504bd990ff269a9b3854a69148"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.468229 4753 scope.go:117] "RemoveContainer" containerID="b3b6ee70c554d233f3c718078ff4c045968a451d8d7697c67f0678dac1ef2bc9"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.519646 4753 scope.go:117] "RemoveContainer" containerID="64d5cf9961fdc072a99ffd6d9ed8221a2d840498d028dc8aee1ce5aa58450b8f"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.567446 4753 scope.go:117] "RemoveContainer" containerID="80f6493520b041b792a9da767c061027d7f87b30a886b98911da63d6e47ede9d"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.597061 4753 scope.go:117] "RemoveContainer" containerID="412bb4c7d740b7f3e594de262a6dab3b328b0f72159f15e79fe2bdf65c6f87e3"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.619779 4753 scope.go:117] "RemoveContainer" containerID="1cfc1cf52ac40ee3d8b792343ac268277c29047046b61adb1a664a8eaa1dbcbf"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.643143 4753 scope.go:117] "RemoveContainer" containerID="ff0064874fe9c945f6d60ddbe6732bbda155c4702492d6c26e046e22237f9c4c"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.666597 4753 scope.go:117] "RemoveContainer" containerID="23e332dd14613502cff19f7e08e6aaad1d1ba9fa8cde51a702b7014210ca4f25"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.688842 4753 scope.go:117] "RemoveContainer" containerID="3268fbee62474ac7876e00c571e9d7af81d8b948899ea0cbc2e32e6da28ac980"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.707555 4753 scope.go:117] "RemoveContainer" containerID="937f2a5e4080a88e914a57b9b0bdfe8ca12291ed513d3b3b14c876f10584e8b5"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.734038 4753 scope.go:117] "RemoveContainer" containerID="7f4b940c570d2aacd28aa054b78af815dd624789f6e3d88278c4363bc411f3ed"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.756519 4753 scope.go:117] "RemoveContainer" containerID="c7610e6976e797caebc6c465bac38a12718b6f31761f4266863b96ad4f6e1b46"
Dec 05 17:43:29 crc kubenswrapper[4753]: I1205 17:43:29.793191 4753 scope.go:117] "RemoveContainer" containerID="edeeac0d7fe94a0ce91bd935d3e74594522aba255e6adea0a00aedec50662ba6"
Dec 05 17:43:46 crc kubenswrapper[4753]: I1205 17:43:46.058873 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-4rjz4"]
Dec 05 17:43:46 crc kubenswrapper[4753]: I1205 17:43:46.071576 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-4rjz4"]
Dec 05 17:43:47 crc kubenswrapper[4753]: I1205 17:43:47.756915 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac022266-ee65-4109-9d59-b3e122702ccd" path="/var/lib/kubelet/pods/ac022266-ee65-4109-9d59-b3e122702ccd/volumes"
Dec 05 17:43:58 crc kubenswrapper[4753]: I1205 17:43:58.979366 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:43:58 crc kubenswrapper[4753]: I1205 17:43:58.979928 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:44:20 crc kubenswrapper[4753]: I1205 17:44:20.048370 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-jzgbr"]
Dec 05 17:44:20 crc kubenswrapper[4753]: I1205 17:44:20.061551 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-rwqhq"]
Dec 05 17:44:20 crc kubenswrapper[4753]: I1205 17:44:20.078142 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-jzgbr"]
Dec 05 17:44:20 crc kubenswrapper[4753]: I1205 17:44:20.089962 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-rwqhq"]
Dec 05 17:44:21 crc kubenswrapper[4753]: I1205 17:44:21.740443 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98474d5b-9d55-4704-a02c-1fd4ccf3f2f9" path="/var/lib/kubelet/pods/98474d5b-9d55-4704-a02c-1fd4ccf3f2f9/volumes"
Dec 05 17:44:21 crc kubenswrapper[4753]: I1205 17:44:21.743200 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bbdd764d-13d3-4597-a938-04eeb490a8ba" path="/var/lib/kubelet/pods/bbdd764d-13d3-4597-a938-04eeb490a8ba/volumes"
Dec 05 17:44:28 crc kubenswrapper[4753]: I1205 17:44:28.978677 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:44:28 crc kubenswrapper[4753]: I1205 17:44:28.979092 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:44:28 crc kubenswrapper[4753]: I1205 17:44:28.979131 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68"
Dec 05 17:44:28 crc kubenswrapper[4753]: I1205 17:44:28.979884 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 17:44:28 crc kubenswrapper[4753]: I1205 17:44:28.979952 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" gracePeriod=600
Dec 05 17:44:29 crc kubenswrapper[4753]: E1205 17:44:29.100716 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:44:29 crc kubenswrapper[4753]: I1205 17:44:29.414589 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" exitCode=0
Dec 05 17:44:29 crc kubenswrapper[4753]: I1205 17:44:29.414642 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"}
Dec 05 17:44:29 crc kubenswrapper[4753]: I1205 17:44:29.414690 4753 scope.go:117] "RemoveContainer" containerID="991d8322b4cfd6073d1f5ac6852a07230df5501090d0dc07c17c7fbcde13dc9e"
Dec 05 17:44:29 crc kubenswrapper[4753]: I1205 17:44:29.415497 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:44:29 crc kubenswrapper[4753]: E1205 17:44:29.416519 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:44:30 crc kubenswrapper[4753]: I1205 17:44:30.090718 4753 scope.go:117] "RemoveContainer" containerID="98ee6cc5d087d5642253f4516ee41bb7601dc040eef2c2f7656a06a05f43b060"
Dec 05 17:44:30 crc kubenswrapper[4753]: I1205 17:44:30.169857 4753 scope.go:117] "RemoveContainer" containerID="6a2916f7375c1b6afc1565f9eabc965d1f3ee4ba592c3cad7af614fc036fd121"
Dec 05 17:44:30 crc kubenswrapper[4753]: I1205 17:44:30.213260 4753 scope.go:117] "RemoveContainer" containerID="ebd79e80aaf3fe4c3fc1df9b6c3f177dca356f7f9bf9f677ad32e21e54bff97b"
Dec 05 17:44:34 crc kubenswrapper[4753]: I1205 17:44:34.063229 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-d47nm"]
Dec 05 17:44:34 crc kubenswrapper[4753]: I1205 17:44:34.076245 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-d47nm"]
Dec 05 17:44:35 crc kubenswrapper[4753]: I1205 17:44:35.739334 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6486f24-9b1d-40a4-bfff-7126ab5b1ba4" path="/var/lib/kubelet/pods/c6486f24-9b1d-40a4-bfff-7126ab5b1ba4/volumes"
Dec 05 17:44:38 crc kubenswrapper[4753]: I1205 17:44:38.039444 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-klc67"]
Dec 05 17:44:38 crc kubenswrapper[4753]: I1205 17:44:38.053730 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-klc67"]
Dec 05 17:44:39 crc kubenswrapper[4753]: I1205 17:44:39.755039 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d74be7c-896f-4771-8d81-293a02d24cd5" path="/var/lib/kubelet/pods/5d74be7c-896f-4771-8d81-293a02d24cd5/volumes"
Dec 05 17:44:44 crc kubenswrapper[4753]: I1205 17:44:44.721090 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:44:44 crc kubenswrapper[4753]: E1205 17:44:44.722171 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:44:49 crc kubenswrapper[4753]: I1205 17:44:49.084555 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-l429g"]
Dec 05 17:44:49 crc kubenswrapper[4753]: I1205 17:44:49.098312 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-l429g"]
Dec 05 17:44:49 crc kubenswrapper[4753]: I1205 17:44:49.750906 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="562378ad-0bd6-4143-bc1e-331ec844e38f" path="/var/lib/kubelet/pods/562378ad-0bd6-4143-bc1e-331ec844e38f/volumes"
Dec 05 17:44:55 crc kubenswrapper[4753]: I1205 17:44:55.034054 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-f6c6z"]
Dec 05 17:44:55 crc kubenswrapper[4753]: I1205 17:44:55.074079 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-f6c6z"]
Dec 05 17:44:55 crc kubenswrapper[4753]: I1205 17:44:55.740937 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="68a4a494-d4ff-43ee-a74c-4f0377d229ec" path="/var/lib/kubelet/pods/68a4a494-d4ff-43ee-a74c-4f0377d229ec/volumes"
Dec 05 17:44:58 crc kubenswrapper[4753]: I1205 17:44:58.720737 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:44:58 crc kubenswrapper[4753]: E1205 17:44:58.721260 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.159188 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"]
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.166733 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.173320 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.173606 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.176092 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55491de8-76f7-4830-b11e-f0c2e18845ae-secret-volume\") pod \"collect-profiles-29415945-22sp8\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.176206 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55491de8-76f7-4830-b11e-f0c2e18845ae-config-volume\") pod \"collect-profiles-29415945-22sp8\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.196401 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqb2n\" (UniqueName: \"kubernetes.io/projected/55491de8-76f7-4830-b11e-f0c2e18845ae-kube-api-access-vqb2n\") pod \"collect-profiles-29415945-22sp8\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.216203 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"]
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.298536 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55491de8-76f7-4830-b11e-f0c2e18845ae-secret-volume\") pod \"collect-profiles-29415945-22sp8\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.298599 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55491de8-76f7-4830-b11e-f0c2e18845ae-config-volume\") pod \"collect-profiles-29415945-22sp8\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.298707 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqb2n\" (UniqueName: \"kubernetes.io/projected/55491de8-76f7-4830-b11e-f0c2e18845ae-kube-api-access-vqb2n\") pod \"collect-profiles-29415945-22sp8\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.299658 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55491de8-76f7-4830-b11e-f0c2e18845ae-config-volume\") pod \"collect-profiles-29415945-22sp8\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.305011 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55491de8-76f7-4830-b11e-f0c2e18845ae-secret-volume\") pod \"collect-profiles-29415945-22sp8\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.314894 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqb2n\" (UniqueName: \"kubernetes.io/projected/55491de8-76f7-4830-b11e-f0c2e18845ae-kube-api-access-vqb2n\") pod \"collect-profiles-29415945-22sp8\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.497731 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8" Dec 05 17:45:00 crc kubenswrapper[4753]: I1205 17:45:00.991723 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"] Dec 05 17:45:01 crc kubenswrapper[4753]: I1205 17:45:01.832907 4753 generic.go:334] "Generic (PLEG): container finished" podID="55491de8-76f7-4830-b11e-f0c2e18845ae" containerID="3d0f6beb0e80532da3ddaaa2c7457e5fd4e4970b034c9f63fa7a42403fbef956" exitCode=0 Dec 05 17:45:01 crc kubenswrapper[4753]: I1205 17:45:01.832991 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8" event={"ID":"55491de8-76f7-4830-b11e-f0c2e18845ae","Type":"ContainerDied","Data":"3d0f6beb0e80532da3ddaaa2c7457e5fd4e4970b034c9f63fa7a42403fbef956"} Dec 05 17:45:01 crc kubenswrapper[4753]: I1205 17:45:01.833274 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8" event={"ID":"55491de8-76f7-4830-b11e-f0c2e18845ae","Type":"ContainerStarted","Data":"38aa855e23647e72ab0a3edebab91b7847ff8f334acdc3614b86c9d295025efa"} Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.295057 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8" Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.361102 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55491de8-76f7-4830-b11e-f0c2e18845ae-secret-volume\") pod \"55491de8-76f7-4830-b11e-f0c2e18845ae\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.361273 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55491de8-76f7-4830-b11e-f0c2e18845ae-config-volume\") pod \"55491de8-76f7-4830-b11e-f0c2e18845ae\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.361418 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqb2n\" (UniqueName: \"kubernetes.io/projected/55491de8-76f7-4830-b11e-f0c2e18845ae-kube-api-access-vqb2n\") pod \"55491de8-76f7-4830-b11e-f0c2e18845ae\" (UID: \"55491de8-76f7-4830-b11e-f0c2e18845ae\") " Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.362086 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55491de8-76f7-4830-b11e-f0c2e18845ae-config-volume" (OuterVolumeSpecName: "config-volume") pod "55491de8-76f7-4830-b11e-f0c2e18845ae" (UID: "55491de8-76f7-4830-b11e-f0c2e18845ae"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.363284 4753 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/55491de8-76f7-4830-b11e-f0c2e18845ae-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.366980 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/55491de8-76f7-4830-b11e-f0c2e18845ae-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "55491de8-76f7-4830-b11e-f0c2e18845ae" (UID: "55491de8-76f7-4830-b11e-f0c2e18845ae"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.367106 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55491de8-76f7-4830-b11e-f0c2e18845ae-kube-api-access-vqb2n" (OuterVolumeSpecName: "kube-api-access-vqb2n") pod "55491de8-76f7-4830-b11e-f0c2e18845ae" (UID: "55491de8-76f7-4830-b11e-f0c2e18845ae"). InnerVolumeSpecName "kube-api-access-vqb2n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.465676 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqb2n\" (UniqueName: \"kubernetes.io/projected/55491de8-76f7-4830-b11e-f0c2e18845ae-kube-api-access-vqb2n\") on node \"crc\" DevicePath \"\"" Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.465715 4753 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/55491de8-76f7-4830-b11e-f0c2e18845ae-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.864525 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8" event={"ID":"55491de8-76f7-4830-b11e-f0c2e18845ae","Type":"ContainerDied","Data":"38aa855e23647e72ab0a3edebab91b7847ff8f334acdc3614b86c9d295025efa"} Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.864571 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="38aa855e23647e72ab0a3edebab91b7847ff8f334acdc3614b86c9d295025efa" Dec 05 17:45:03 crc kubenswrapper[4753]: I1205 17:45:03.864624 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8" Dec 05 17:45:04 crc kubenswrapper[4753]: I1205 17:45:04.386210 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf"] Dec 05 17:45:04 crc kubenswrapper[4753]: I1205 17:45:04.400421 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415900-2lmxf"] Dec 05 17:45:05 crc kubenswrapper[4753]: I1205 17:45:05.734313 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c18256b2-4287-4df0-8819-201cf14c6380" path="/var/lib/kubelet/pods/c18256b2-4287-4df0-8819-201cf14c6380/volumes" Dec 05 17:45:13 crc kubenswrapper[4753]: I1205 17:45:13.720827 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:45:13 crc kubenswrapper[4753]: E1205 17:45:13.721738 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:45:27 crc kubenswrapper[4753]: I1205 17:45:27.721204 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:45:27 crc kubenswrapper[4753]: E1205 17:45:27.721953 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.064947 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bf9vx"] Dec 05 17:45:30 crc kubenswrapper[4753]: E1205 17:45:30.065954 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55491de8-76f7-4830-b11e-f0c2e18845ae" containerName="collect-profiles" Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.065978 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="55491de8-76f7-4830-b11e-f0c2e18845ae" containerName="collect-profiles" Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.066378 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="55491de8-76f7-4830-b11e-f0c2e18845ae" containerName="collect-profiles" Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.068682 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.085998 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bf9vx"]
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.198869 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrz8c\" (UniqueName: \"kubernetes.io/projected/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-kube-api-access-lrz8c\") pod \"redhat-marketplace-bf9vx\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") " pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.199222 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-utilities\") pod \"redhat-marketplace-bf9vx\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") " pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.199294 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-catalog-content\") pod \"redhat-marketplace-bf9vx\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") " pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.301276 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrz8c\" (UniqueName: \"kubernetes.io/projected/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-kube-api-access-lrz8c\") pod \"redhat-marketplace-bf9vx\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") " pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.301334 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-utilities\") pod \"redhat-marketplace-bf9vx\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") " pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.301406 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-catalog-content\") pod \"redhat-marketplace-bf9vx\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") " pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.301943 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-utilities\") pod \"redhat-marketplace-bf9vx\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") " pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.301980 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-catalog-content\") pod \"redhat-marketplace-bf9vx\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") " pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.324404 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrz8c\" (UniqueName: \"kubernetes.io/projected/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-kube-api-access-lrz8c\") pod \"redhat-marketplace-bf9vx\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") " pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.345457 4753 scope.go:117] "RemoveContainer" containerID="a05c9724617522c7f719a6bb68143d24b7811e88120000f953246afd4de857bc"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.400412 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.404943 4753 scope.go:117] "RemoveContainer" containerID="055bb8811e611e031ed0d48db68708c72de592577aab13446925fce025eb4e8d"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.448189 4753 scope.go:117] "RemoveContainer" containerID="62ed0226dbfaa3d50e7bb491688c2ee289e0378607a353b658f6c874aced0309"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.560486 4753 scope.go:117] "RemoveContainer" containerID="89de624176223c811cb2f57a41e6098fc22334831d8859e206efa904e026fa14"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.614774 4753 scope.go:117] "RemoveContainer" containerID="db9b02981461df0ef06b5460767b1cc32682af21a68b9885dc337ec2d7def6b0"
Dec 05 17:45:30 crc kubenswrapper[4753]: I1205 17:45:30.894337 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bf9vx"]
Dec 05 17:45:31 crc kubenswrapper[4753]: I1205 17:45:31.170973 4753 generic.go:334] "Generic (PLEG): container finished" podID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerID="ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac" exitCode=0
Dec 05 17:45:31 crc kubenswrapper[4753]: I1205 17:45:31.171342 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bf9vx" event={"ID":"684390bd-4ec8-4f9c-aeea-ac8dc935bf88","Type":"ContainerDied","Data":"ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac"}
Dec 05 17:45:31 crc kubenswrapper[4753]: I1205 17:45:31.171368 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bf9vx" event={"ID":"684390bd-4ec8-4f9c-aeea-ac8dc935bf88","Type":"ContainerStarted","Data":"cadc16043401fb70b9160cf346a02c3ef1e3255795e65da21a5092e522aef103"}
Dec 05 17:45:32 crc kubenswrapper[4753]: I1205 17:45:32.183531 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bf9vx" event={"ID":"684390bd-4ec8-4f9c-aeea-ac8dc935bf88","Type":"ContainerStarted","Data":"742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846"}
Dec 05 17:45:33 crc kubenswrapper[4753]: I1205 17:45:33.195491 4753 generic.go:334] "Generic (PLEG): container finished" podID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerID="742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846" exitCode=0
Dec 05 17:45:33 crc kubenswrapper[4753]: I1205 17:45:33.195552 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bf9vx" event={"ID":"684390bd-4ec8-4f9c-aeea-ac8dc935bf88","Type":"ContainerDied","Data":"742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846"}
Dec 05 17:45:34 crc kubenswrapper[4753]: I1205 17:45:34.212054 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bf9vx" event={"ID":"684390bd-4ec8-4f9c-aeea-ac8dc935bf88","Type":"ContainerStarted","Data":"860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f"}
Dec 05 17:45:34 crc kubenswrapper[4753]: I1205 17:45:34.235813 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bf9vx" podStartSLOduration=1.769208415 podStartE2EDuration="4.235792378s" podCreationTimestamp="2025-12-05 17:45:30 +0000 UTC" firstStartedPulling="2025-12-05 17:45:31.173257342 +0000 UTC m=+2469.676364348" lastFinishedPulling="2025-12-05 17:45:33.639841305 +0000 UTC m=+2472.142948311" observedRunningTime="2025-12-05 17:45:34.232488945 +0000 UTC m=+2472.735595981" watchObservedRunningTime="2025-12-05 17:45:34.235792378 +0000 UTC m=+2472.738899404"
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.057304 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-7rkhf"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.076880 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-ncw7r"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.094540 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-qvb5q"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.103634 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-134b-account-create-update-qf8qh"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.113189 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-qvb5q"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.123542 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-080f-account-create-update-hsplf"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.173570 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-ncw7r"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.185163 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-7rkhf"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.194914 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-ae67-account-create-update-lxk95"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.204962 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-134b-account-create-update-qf8qh"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.213315 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-080f-account-create-update-hsplf"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.222076 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-ae67-account-create-update-lxk95"]
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.401114 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.401182 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.471879 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:40 crc kubenswrapper[4753]: I1205 17:45:40.721317 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:45:40 crc kubenswrapper[4753]: E1205 17:45:40.721691 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:45:41 crc kubenswrapper[4753]: I1205 17:45:41.340322 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:41 crc kubenswrapper[4753]: I1205 17:45:41.388095 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bf9vx"]
Dec 05 17:45:41 crc kubenswrapper[4753]: I1205 17:45:41.735552 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f79146b-e55c-4ccf-93b3-91829167768b" path="/var/lib/kubelet/pods/2f79146b-e55c-4ccf-93b3-91829167768b/volumes"
Dec 05 17:45:41 crc kubenswrapper[4753]: I1205 17:45:41.736431 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="762045ba-049a-471e-b6dc-ac74b0c28bfa" path="/var/lib/kubelet/pods/762045ba-049a-471e-b6dc-ac74b0c28bfa/volumes"
Dec 05 17:45:41 crc kubenswrapper[4753]: I1205 17:45:41.737298 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b231cbb-8d95-4948-aba8-825809d77fa7" path="/var/lib/kubelet/pods/8b231cbb-8d95-4948-aba8-825809d77fa7/volumes"
Dec 05 17:45:41 crc kubenswrapper[4753]: I1205 17:45:41.738194 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b33874b7-5966-44b0-9a9c-a555e52127b9" path="/var/lib/kubelet/pods/b33874b7-5966-44b0-9a9c-a555e52127b9/volumes"
Dec 05 17:45:41 crc kubenswrapper[4753]: I1205 17:45:41.739586 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9465bae-5a66-4ab6-956b-1258eb08db35" path="/var/lib/kubelet/pods/d9465bae-5a66-4ab6-956b-1258eb08db35/volumes"
Dec 05 17:45:41 crc kubenswrapper[4753]: I1205 17:45:41.740336 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de05c77d-b326-4c0c-996f-a78f35ae6694" path="/var/lib/kubelet/pods/de05c77d-b326-4c0c-996f-a78f35ae6694/volumes"
Dec 05 17:45:43 crc kubenswrapper[4753]: I1205 17:45:43.314173 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bf9vx" podUID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerName="registry-server" containerID="cri-o://860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f" gracePeriod=2
Dec 05 17:45:43 crc kubenswrapper[4753]: I1205 17:45:43.858454 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:43 crc kubenswrapper[4753]: I1205 17:45:43.951719 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrz8c\" (UniqueName: \"kubernetes.io/projected/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-kube-api-access-lrz8c\") pod \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") "
Dec 05 17:45:43 crc kubenswrapper[4753]: I1205 17:45:43.951944 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-utilities\") pod \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") "
Dec 05 17:45:43 crc kubenswrapper[4753]: I1205 17:45:43.952916 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-utilities" (OuterVolumeSpecName: "utilities") pod "684390bd-4ec8-4f9c-aeea-ac8dc935bf88" (UID: "684390bd-4ec8-4f9c-aeea-ac8dc935bf88"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:45:43 crc kubenswrapper[4753]: I1205 17:45:43.952978 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-catalog-content\") pod \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\" (UID: \"684390bd-4ec8-4f9c-aeea-ac8dc935bf88\") "
Dec 05 17:45:43 crc kubenswrapper[4753]: I1205 17:45:43.956008 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:43 crc kubenswrapper[4753]: I1205 17:45:43.961262 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-kube-api-access-lrz8c" (OuterVolumeSpecName: "kube-api-access-lrz8c") pod "684390bd-4ec8-4f9c-aeea-ac8dc935bf88" (UID: "684390bd-4ec8-4f9c-aeea-ac8dc935bf88"). InnerVolumeSpecName "kube-api-access-lrz8c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:45:43 crc kubenswrapper[4753]: I1205 17:45:43.983861 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "684390bd-4ec8-4f9c-aeea-ac8dc935bf88" (UID: "684390bd-4ec8-4f9c-aeea-ac8dc935bf88"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.059997 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrz8c\" (UniqueName: \"kubernetes.io/projected/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-kube-api-access-lrz8c\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.060123 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/684390bd-4ec8-4f9c-aeea-ac8dc935bf88-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.335884 4753 generic.go:334] "Generic (PLEG): container finished" podID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerID="860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f" exitCode=0
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.335972 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bf9vx"
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.335968 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bf9vx" event={"ID":"684390bd-4ec8-4f9c-aeea-ac8dc935bf88","Type":"ContainerDied","Data":"860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f"}
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.336144 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bf9vx" event={"ID":"684390bd-4ec8-4f9c-aeea-ac8dc935bf88","Type":"ContainerDied","Data":"cadc16043401fb70b9160cf346a02c3ef1e3255795e65da21a5092e522aef103"}
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.336262 4753 scope.go:117] "RemoveContainer" containerID="860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f"
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.382215 4753 scope.go:117] "RemoveContainer" containerID="742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846"
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.428488 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bf9vx"]
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.433305 4753 scope.go:117] "RemoveContainer" containerID="ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac"
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.444323 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bf9vx"]
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.472697 4753 scope.go:117] "RemoveContainer" containerID="860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f"
Dec 05 17:45:44 crc kubenswrapper[4753]: E1205 17:45:44.473577 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f\": container with ID starting with 860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f not found: ID does not exist" containerID="860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f"
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.473638 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f"} err="failed to get container status \"860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f\": rpc error: code = NotFound desc = could not find container \"860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f\": container with ID starting with 860ca6f8ddb476a52db450aee379af20bc09d1753946c9d6afddbc1a45c4632f not found: ID does not exist"
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.473742 4753 scope.go:117] "RemoveContainer" containerID="742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846"
Dec 05 17:45:44 crc kubenswrapper[4753]: E1205 17:45:44.474142 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846\": container with ID starting with 742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846 not found: ID does not exist" containerID="742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846"
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.474193 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846"} err="failed to get container status \"742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846\": rpc error: code = NotFound desc = could not find container \"742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846\": container with ID starting with 742398dbb92da7e60a1b777aa2deda9a64bb3c69d3b688932f7aad0394062846 not found: ID does not exist"
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.474213 4753 scope.go:117] "RemoveContainer" containerID="ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac"
Dec 05 17:45:44 crc kubenswrapper[4753]: E1205 17:45:44.474553 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac\": container with ID starting with ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac not found: ID does not exist" containerID="ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac"
Dec 05 17:45:44 crc kubenswrapper[4753]: I1205 17:45:44.474581 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac"} err="failed to get container status \"ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac\": rpc error: code = NotFound desc = could not find container \"ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac\": container with ID starting with ac54d44728bb9800e31d0d45f06d2a3420f3f07b368d0ed7c3a0810051bc2eac not found: ID does not exist"
Dec 05 17:45:45 crc kubenswrapper[4753]: I1205 17:45:45.737039 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" path="/var/lib/kubelet/pods/684390bd-4ec8-4f9c-aeea-ac8dc935bf88/volumes"
Dec 05 17:45:55 crc kubenswrapper[4753]: I1205 17:45:55.721854 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:45:55 crc kubenswrapper[4753]: E1205 17:45:55.722927 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:45:56 crc kubenswrapper[4753]: I1205 17:45:56.472145 4753 generic.go:334] "Generic (PLEG): container finished" podID="825da353-e856-45ac-9cff-027d1f16663a" containerID="1d3961afdbb04a0cf4d9477547d8bdbb5cd4e9f122cdb8db947ce595fb145ed8" exitCode=0
Dec 05 17:45:56 crc kubenswrapper[4753]: I1205 17:45:56.472289 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" event={"ID":"825da353-e856-45ac-9cff-027d1f16663a","Type":"ContainerDied","Data":"1d3961afdbb04a0cf4d9477547d8bdbb5cd4e9f122cdb8db947ce595fb145ed8"}
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.019326 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.190713 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-ssh-key\") pod \"825da353-e856-45ac-9cff-027d1f16663a\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") "
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.190924 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4698\" (UniqueName: \"kubernetes.io/projected/825da353-e856-45ac-9cff-027d1f16663a-kube-api-access-c4698\") pod \"825da353-e856-45ac-9cff-027d1f16663a\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") "
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.191041 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-inventory\") pod \"825da353-e856-45ac-9cff-027d1f16663a\" (UID: \"825da353-e856-45ac-9cff-027d1f16663a\") "
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.200452 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/825da353-e856-45ac-9cff-027d1f16663a-kube-api-access-c4698" (OuterVolumeSpecName: "kube-api-access-c4698") pod "825da353-e856-45ac-9cff-027d1f16663a" (UID: "825da353-e856-45ac-9cff-027d1f16663a"). InnerVolumeSpecName "kube-api-access-c4698". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.224820 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-inventory" (OuterVolumeSpecName: "inventory") pod "825da353-e856-45ac-9cff-027d1f16663a" (UID: "825da353-e856-45ac-9cff-027d1f16663a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.226597 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "825da353-e856-45ac-9cff-027d1f16663a" (UID: "825da353-e856-45ac-9cff-027d1f16663a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.293724 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.293757 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4698\" (UniqueName: \"kubernetes.io/projected/825da353-e856-45ac-9cff-027d1f16663a-kube-api-access-c4698\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.293768 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/825da353-e856-45ac-9cff-027d1f16663a-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.498276 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7" event={"ID":"825da353-e856-45ac-9cff-027d1f16663a","Type":"ContainerDied","Data":"30bfb03bc71d413eda86252adbada3c9aa4791f79b7228236cca12c05ea23854"}
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.498315 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30bfb03bc71d413eda86252adbada3c9aa4791f79b7228236cca12c05ea23854"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.498401 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-2drf7"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.591700 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"]
Dec 05 17:45:58 crc kubenswrapper[4753]: E1205 17:45:58.592227 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerName="registry-server"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.592249 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerName="registry-server"
Dec 05 17:45:58 crc kubenswrapper[4753]: E1205 17:45:58.592263 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerName="extract-utilities"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.592273 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerName="extract-utilities"
Dec 05 17:45:58 crc kubenswrapper[4753]: E1205 17:45:58.592297 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerName="extract-content"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.592305 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerName="extract-content"
Dec 05 17:45:58 crc kubenswrapper[4753]: E1205 17:45:58.592325 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="825da353-e856-45ac-9cff-027d1f16663a" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.592334 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="825da353-e856-45ac-9cff-027d1f16663a" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.592565 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="684390bd-4ec8-4f9c-aeea-ac8dc935bf88" containerName="registry-server"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.592589 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="825da353-e856-45ac-9cff-027d1f16663a" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.593441 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.595478 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.596338 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.597618 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.597865 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.623475 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"]
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.702402 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gthrz\" (UniqueName: \"kubernetes.io/projected/9e4d29b1-9d77-4744-85ed-e6882651cea9-kube-api-access-gthrz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.702499 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.702775 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.804715 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.804817 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.804968 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gthrz\" (UniqueName: \"kubernetes.io/projected/9e4d29b1-9d77-4744-85ed-e6882651cea9-kube-api-access-gthrz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.809008 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.809177 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.837372 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gthrz\" (UniqueName: \"kubernetes.io/projected/9e4d29b1-9d77-4744-85ed-e6882651cea9-kube-api-access-gthrz\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:58 crc kubenswrapper[4753]: I1205 17:45:58.916741 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:45:59 crc kubenswrapper[4753]: I1205 17:45:59.619489 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"]
Dec 05 17:46:00 crc kubenswrapper[4753]: I1205 17:46:00.522110 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m" event={"ID":"9e4d29b1-9d77-4744-85ed-e6882651cea9","Type":"ContainerStarted","Data":"8f8d936537c56fab6e76994b58281258357fcc7a3b8b6748bc47ad4de09371ef"}
Dec 05 17:46:01 crc kubenswrapper[4753]: I1205 17:46:01.536645 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m" event={"ID":"9e4d29b1-9d77-4744-85ed-e6882651cea9","Type":"ContainerStarted","Data":"5ecdbe5bd9db6421c2c13e8bac1c6fec8d179df84f613591e2299dd16b63b8f6"}
Dec 05 17:46:01 crc kubenswrapper[4753]: I1205 17:46:01.559019 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m" podStartSLOduration=2.588266299 podStartE2EDuration="3.559004715s" podCreationTimestamp="2025-12-05 17:45:58 +0000 UTC" firstStartedPulling="2025-12-05 17:45:59.642255882 +0000 UTC m=+2498.145362888" lastFinishedPulling="2025-12-05 17:46:00.612994288 +0000 UTC m=+2499.116101304" observedRunningTime="2025-12-05 17:46:01.553416907 +0000 UTC m=+2500.056523953" watchObservedRunningTime="2025-12-05 17:46:01.559004715 +0000 UTC m=+2500.062111721"
Dec 05 17:46:09 crc kubenswrapper[4753]: I1205 17:46:09.721720 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:46:09 crc kubenswrapper[4753]: E1205 17:46:09.722825 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:46:12 crc kubenswrapper[4753]: I1205 17:46:12.059678 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z8vmq"]
Dec 05 17:46:12 crc kubenswrapper[4753]: I1205 17:46:12.075356 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-z8vmq"]
Dec 05 17:46:13 crc kubenswrapper[4753]: I1205 17:46:13.742180 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b" path="/var/lib/kubelet/pods/903b9bd6-7d66-4fbf-8dc6-dfd6a57bab2b/volumes"
Dec 05 17:46:20 crc kubenswrapper[4753]: I1205 17:46:20.722033 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:46:20 crc kubenswrapper[4753]: E1205 17:46:20.723359 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:46:30 crc kubenswrapper[4753]: I1205 17:46:30.763789 4753 scope.go:117] "RemoveContainer" containerID="abf59d661542894af680c2f2c771713ff34a2f765563f33ac37718a60faa7e2c"
Dec 05 17:46:30 crc kubenswrapper[4753]: I1205 17:46:30.818359 4753 scope.go:117] "RemoveContainer" containerID="83d17ce61da5fa359a7f1c3eb57d36f1a3f0fd913b9f999b59d4e1fc201fa205"
Dec 05 17:46:30 crc kubenswrapper[4753]: I1205 17:46:30.886696 4753 scope.go:117] "RemoveContainer" containerID="11ee963d9277da03301c99cb1ed7e9f5ddf7286cc262069817bd8138af0ee490"
Dec 05 17:46:30 crc kubenswrapper[4753]: I1205 17:46:30.930865 4753 scope.go:117] "RemoveContainer" containerID="1e432b8956b70c86da42c88ebc4f91ad24587d8748b9b21ab70af6fd9b21e073"
Dec 05 17:46:30 crc kubenswrapper[4753]: I1205 17:46:30.969901 4753 scope.go:117] "RemoveContainer" containerID="3f192e10789b7558bdbed59f6ef14b0147358717debd0e32f57ce049fc9cff50"
Dec 05 17:46:31 crc kubenswrapper[4753]: I1205 17:46:31.053874 4753 scope.go:117] "RemoveContainer" containerID="852c80aed050ae6afcd79a565b028380534b80ba8c6b19da7ea2df168102a5d6"
Dec 05 17:46:31 crc kubenswrapper[4753]: I1205 17:46:31.061702 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-h8pkm"]
Dec 05 17:46:31 crc kubenswrapper[4753]: I1205 17:46:31.075969 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-h8pkm"]
Dec 05 17:46:31 crc kubenswrapper[4753]: I1205 17:46:31.086138 4753 scope.go:117] "RemoveContainer" containerID="c81edf8500118e047a560e89ae9db6aa858a333b949cf7faea5dca3fa5d4756f"
Dec 05 17:46:31 crc kubenswrapper[4753]: I1205 17:46:31.729957 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:46:31 crc kubenswrapper[4753]: E1205 17:46:31.731178 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:46:31 crc kubenswrapper[4753]: I1205 17:46:31.740949 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db47010e-762f-430c-b384-215d4ea30192" path="/var/lib/kubelet/pods/db47010e-762f-430c-b384-215d4ea30192/volumes"
Dec 05 17:46:33 crc kubenswrapper[4753]: I1205 17:46:33.040537 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mgmpb"]
Dec 05 17:46:33 crc kubenswrapper[4753]: I1205 17:46:33.052570 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-mgmpb"]
Dec 05 17:46:33 crc kubenswrapper[4753]: I1205 17:46:33.744976 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92da46a9-fbb7-4c2b-a26d-22065b21a23a" path="/var/lib/kubelet/pods/92da46a9-fbb7-4c2b-a26d-22065b21a23a/volumes"
Dec 05 17:46:44 crc kubenswrapper[4753]: I1205 17:46:44.721584 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:46:44 crc kubenswrapper[4753]: E1205 17:46:44.723102 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:46:55 crc kubenswrapper[4753]: I1205 17:46:55.721895 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:46:55 crc kubenswrapper[4753]: E1205 17:46:55.723017 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:47:06 crc kubenswrapper[4753]: I1205 17:47:06.720337 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:47:06 crc kubenswrapper[4753]: E1205 17:47:06.720991 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:47:18 crc kubenswrapper[4753]: I1205 17:47:18.042907 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-2wp87"]
Dec 05 17:47:18 crc kubenswrapper[4753]: I1205 17:47:18.052164 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-2wp87"]
Dec 05 17:47:19 crc kubenswrapper[4753]: I1205 17:47:19.720887 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:47:19 crc kubenswrapper[4753]: E1205 17:47:19.721637 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:47:19 crc kubenswrapper[4753]: I1205 17:47:19.735461 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b" path="/var/lib/kubelet/pods/4ec2b050-b2dd-4c44-9d5b-29babc0e4d0b/volumes"
Dec 05 17:47:31 crc kubenswrapper[4753]: I1205 17:47:31.231448 4753 scope.go:117] "RemoveContainer" containerID="e0368f1e24c0e1f2b65d96f6c4f0ce9fe6637474024a019d438ee2e8d82a1ec6"
Dec 05 17:47:31 crc kubenswrapper[4753]: I1205 17:47:31.277573 4753 scope.go:117] "RemoveContainer" containerID="fe889ebfa403e966a3c6f1989b49cc16d95b97030b92a4ebac4ae6a38762fa88"
Dec 05 17:47:31 crc kubenswrapper[4753]: I1205 17:47:31.319298 4753 scope.go:117] "RemoveContainer" containerID="60f81511a25640815c4a1caecb42650016dab8ca75a64b978b891dd3577eefd7"
Dec 05 17:47:31 crc kubenswrapper[4753]: I1205 17:47:31.728627 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:47:31 crc kubenswrapper[4753]: E1205 17:47:31.728987 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:47:36 crc kubenswrapper[4753]: I1205 17:47:36.607183 4753 generic.go:334] "Generic (PLEG): container finished" podID="9e4d29b1-9d77-4744-85ed-e6882651cea9" containerID="5ecdbe5bd9db6421c2c13e8bac1c6fec8d179df84f613591e2299dd16b63b8f6" exitCode=0
Dec 05 17:47:36 crc kubenswrapper[4753]: I1205 17:47:36.607310 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m" event={"ID":"9e4d29b1-9d77-4744-85ed-e6882651cea9","Type":"ContainerDied","Data":"5ecdbe5bd9db6421c2c13e8bac1c6fec8d179df84f613591e2299dd16b63b8f6"}
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.163995 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.355585 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-ssh-key\") pod \"9e4d29b1-9d77-4744-85ed-e6882651cea9\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") "
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.355806 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-inventory\") pod \"9e4d29b1-9d77-4744-85ed-e6882651cea9\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") "
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.355858 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gthrz\" (UniqueName: \"kubernetes.io/projected/9e4d29b1-9d77-4744-85ed-e6882651cea9-kube-api-access-gthrz\") pod \"9e4d29b1-9d77-4744-85ed-e6882651cea9\" (UID: \"9e4d29b1-9d77-4744-85ed-e6882651cea9\") "
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.361420 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e4d29b1-9d77-4744-85ed-e6882651cea9-kube-api-access-gthrz" (OuterVolumeSpecName: "kube-api-access-gthrz") pod "9e4d29b1-9d77-4744-85ed-e6882651cea9" (UID: "9e4d29b1-9d77-4744-85ed-e6882651cea9"). InnerVolumeSpecName "kube-api-access-gthrz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.386690 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-inventory" (OuterVolumeSpecName: "inventory") pod "9e4d29b1-9d77-4744-85ed-e6882651cea9" (UID: "9e4d29b1-9d77-4744-85ed-e6882651cea9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.389703 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9e4d29b1-9d77-4744-85ed-e6882651cea9" (UID: "9e4d29b1-9d77-4744-85ed-e6882651cea9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.461391 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.461465 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9e4d29b1-9d77-4744-85ed-e6882651cea9-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.461493 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gthrz\" (UniqueName: \"kubernetes.io/projected/9e4d29b1-9d77-4744-85ed-e6882651cea9-kube-api-access-gthrz\") on node \"crc\" DevicePath \"\""
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.633283 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m" event={"ID":"9e4d29b1-9d77-4744-85ed-e6882651cea9","Type":"ContainerDied","Data":"8f8d936537c56fab6e76994b58281258357fcc7a3b8b6748bc47ad4de09371ef"}
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.633325 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f8d936537c56fab6e76994b58281258357fcc7a3b8b6748bc47ad4de09371ef"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.633346 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.739785 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"]
Dec 05 17:47:38 crc kubenswrapper[4753]: E1205 17:47:38.741505 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e4d29b1-9d77-4744-85ed-e6882651cea9" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.741552 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e4d29b1-9d77-4744-85ed-e6882651cea9" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.742095 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e4d29b1-9d77-4744-85ed-e6882651cea9" containerName="configure-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.748395 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.748903 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"]
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.750862 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.751182 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.751341 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.751447 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.770623 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.770731 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q94f5\" (UniqueName: \"kubernetes.io/projected/c239ce8f-d247-46bb-889b-914ff6f8ab64-kube-api-access-q94f5\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.770791 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.872229 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.872446 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.872541 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q94f5\" (UniqueName: \"kubernetes.io/projected/c239ce8f-d247-46bb-889b-914ff6f8ab64-kube-api-access-q94f5\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.875620 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.891220 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:38 crc kubenswrapper[4753]: I1205 17:47:38.893506 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q94f5\" (UniqueName: \"kubernetes.io/projected/c239ce8f-d247-46bb-889b-914ff6f8ab64-kube-api-access-q94f5\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:39 crc kubenswrapper[4753]: I1205 17:47:39.082367 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:39 crc kubenswrapper[4753]: I1205 17:47:39.646512 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"]
Dec 05 17:47:39 crc kubenswrapper[4753]: W1205 17:47:39.653865 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc239ce8f_d247_46bb_889b_914ff6f8ab64.slice/crio-0e84d7e908755b66949d7f98ea45b17f152fc06471d045cf754fdefe80ff6a24 WatchSource:0}: Error finding container 0e84d7e908755b66949d7f98ea45b17f152fc06471d045cf754fdefe80ff6a24: Status 404 returned error can't find the container with id 0e84d7e908755b66949d7f98ea45b17f152fc06471d045cf754fdefe80ff6a24
Dec 05 17:47:40 crc kubenswrapper[4753]: I1205 17:47:40.661869 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w" event={"ID":"c239ce8f-d247-46bb-889b-914ff6f8ab64","Type":"ContainerStarted","Data":"8e9864d4af2ea20e5285cd0e2f74f875a7d60e0868269e9b47f5840044db773f"}
Dec 05 17:47:40 crc kubenswrapper[4753]: I1205 17:47:40.662287 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w" event={"ID":"c239ce8f-d247-46bb-889b-914ff6f8ab64","Type":"ContainerStarted","Data":"0e84d7e908755b66949d7f98ea45b17f152fc06471d045cf754fdefe80ff6a24"}
Dec 05 17:47:40 crc kubenswrapper[4753]: I1205 17:47:40.694559 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w" podStartSLOduration=2.188894682 podStartE2EDuration="2.694539955s" podCreationTimestamp="2025-12-05 17:47:38 +0000 UTC" firstStartedPulling="2025-12-05 17:47:39.659320524 +0000 UTC m=+2598.162427530" lastFinishedPulling="2025-12-05 17:47:40.164965787 +0000 UTC m=+2598.668072803" observedRunningTime="2025-12-05 17:47:40.693976729 +0000 UTC m=+2599.197083745" watchObservedRunningTime="2025-12-05 17:47:40.694539955 +0000 UTC m=+2599.197646981"
Dec 05 17:47:45 crc kubenswrapper[4753]: I1205 17:47:45.718548 4753 generic.go:334] "Generic (PLEG): container finished" podID="c239ce8f-d247-46bb-889b-914ff6f8ab64" containerID="8e9864d4af2ea20e5285cd0e2f74f875a7d60e0868269e9b47f5840044db773f" exitCode=0
Dec 05 17:47:45 crc kubenswrapper[4753]: I1205 17:45:45.718623 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w" event={"ID":"c239ce8f-d247-46bb-889b-914ff6f8ab64","Type":"ContainerDied","Data":"8e9864d4af2ea20e5285cd0e2f74f875a7d60e0868269e9b47f5840044db773f"}
Dec 05 17:47:46 crc kubenswrapper[4753]: I1205 17:47:46.720901 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:47:46 crc kubenswrapper[4753]: E1205 17:47:46.721508 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.223247 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.385647 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q94f5\" (UniqueName: \"kubernetes.io/projected/c239ce8f-d247-46bb-889b-914ff6f8ab64-kube-api-access-q94f5\") pod \"c239ce8f-d247-46bb-889b-914ff6f8ab64\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") "
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.385718 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-inventory\") pod \"c239ce8f-d247-46bb-889b-914ff6f8ab64\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") "
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.385811 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-ssh-key\") pod \"c239ce8f-d247-46bb-889b-914ff6f8ab64\" (UID: \"c239ce8f-d247-46bb-889b-914ff6f8ab64\") "
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.393556 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c239ce8f-d247-46bb-889b-914ff6f8ab64-kube-api-access-q94f5" (OuterVolumeSpecName: "kube-api-access-q94f5") pod "c239ce8f-d247-46bb-889b-914ff6f8ab64" (UID: "c239ce8f-d247-46bb-889b-914ff6f8ab64"). InnerVolumeSpecName "kube-api-access-q94f5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.414350 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-inventory" (OuterVolumeSpecName: "inventory") pod "c239ce8f-d247-46bb-889b-914ff6f8ab64" (UID: "c239ce8f-d247-46bb-889b-914ff6f8ab64"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.417110 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c239ce8f-d247-46bb-889b-914ff6f8ab64" (UID: "c239ce8f-d247-46bb-889b-914ff6f8ab64"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.488883 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.488916 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q94f5\" (UniqueName: \"kubernetes.io/projected/c239ce8f-d247-46bb-889b-914ff6f8ab64-kube-api-access-q94f5\") on node \"crc\" DevicePath \"\""
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.488928 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c239ce8f-d247-46bb-889b-914ff6f8ab64-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.750074 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w" event={"ID":"c239ce8f-d247-46bb-889b-914ff6f8ab64","Type":"ContainerDied","Data":"0e84d7e908755b66949d7f98ea45b17f152fc06471d045cf754fdefe80ff6a24"}
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.750982 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e84d7e908755b66949d7f98ea45b17f152fc06471d045cf754fdefe80ff6a24"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.750133 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.824391 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5"]
Dec 05 17:47:47 crc kubenswrapper[4753]: E1205 17:47:47.824990 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c239ce8f-d247-46bb-889b-914ff6f8ab64" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.825016 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="c239ce8f-d247-46bb-889b-914ff6f8ab64" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.825318 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="c239ce8f-d247-46bb-889b-914ff6f8ab64" containerName="validate-network-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.826058 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.827904 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.828248 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.828421 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.828751 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.859416 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5"]
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.998651 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7gbc5\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.998729 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7gbc5\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5"
Dec 05 17:47:47 crc kubenswrapper[4753]: I1205 17:47:47.998798 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhhnp\" (UniqueName: \"kubernetes.io/projected/5678941d-59cd-487e-82a4-f2cf0bf528a7-kube-api-access-fhhnp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7gbc5\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5"
Dec 05 17:47:48 crc kubenswrapper[4753]: I1205 17:47:48.100326 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7gbc5\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5"
Dec 05 17:47:48 crc kubenswrapper[4753]: I1205 17:47:48.100416 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7gbc5\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5"
Dec 05 17:47:48 crc kubenswrapper[4753]: I1205 17:47:48.100484 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhhnp\" (UniqueName: \"kubernetes.io/projected/5678941d-59cd-487e-82a4-f2cf0bf528a7-kube-api-access-fhhnp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7gbc5\" (UID: 
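[editor's note] The "Generic (PLEG): container finished" and "SyncLoop (PLEG): event for pod" entries above come from the kubelet's pod lifecycle event generator, which periodically relists container state from the runtime and diffs it against the previous snapshot to synthesize ContainerStarted/ContainerDied events. A minimal Go sketch of that relist-and-diff shape; the types and names are illustrative, not the kubelet's actual API:

package main

import "fmt"

// State is a container state as seen on one relist.
type State string

const (
	Running State = "running"
	Exited  State = "exited"
)

// Event mirrors the shape of the PLEG log entries.
type Event struct {
	PodID, ContainerID, Type string
}

// diff compares the previous and current snapshots for one pod and
// emits ContainerStarted/ContainerDied events, as each relist does.
func diff(podID string, prev, cur map[string]State) []Event {
	var events []Event
	for id, s := range cur {
		switch {
		case s == Running && prev[id] != Running:
			events = append(events, Event{podID, id, "ContainerStarted"})
		case s == Exited && prev[id] == Running:
			events = append(events, Event{podID, id, "ContainerDied"})
		}
	}
	return events
}

func main() {
	// Truncated IDs from the log, for illustration only.
	prev := map[string]State{"ac54d447": Running}
	cur := map[string]State{"ac54d447": Exited, "742398db": Running}
	for _, e := range diff("684390bd", prev, cur) {
		fmt.Println(e.Type, e.PodID, e.ContainerID)
	}
}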
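[editor's note] The machine-config-daemon entries repeating every 10-15 seconds above are sync attempts being rejected because the container's crash-loop back-off window has not yet expired; only the 5m0s cap appears in the log, while the 10s initial value below is an assumption about the kubelet's default. A sketch of capped exponential back-off, which is the shape behind "back-off 5m0s restarting failed container":

package main

import (
	"fmt"
	"time"
)

func main() {
	backoff := 10 * time.Second // assumed initial back-off
	const maxBackoff = 5 * time.Minute
	for restart := 1; restart <= 8; restart++ {
		fmt.Printf("restart %d: back-off %v\n", restart, backoff)
		backoff *= 2 // double on each failed restart
		if backoff > maxBackoff {
			backoff = maxBackoff // clamp; prints as "5m0s", matching the log
		}
	}
}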
\"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" Dec 05 17:47:48 crc kubenswrapper[4753]: I1205 17:47:48.104543 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7gbc5\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" Dec 05 17:47:48 crc kubenswrapper[4753]: I1205 17:47:48.104702 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7gbc5\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" Dec 05 17:47:48 crc kubenswrapper[4753]: I1205 17:47:48.124207 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhhnp\" (UniqueName: \"kubernetes.io/projected/5678941d-59cd-487e-82a4-f2cf0bf528a7-kube-api-access-fhhnp\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7gbc5\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" Dec 05 17:47:48 crc kubenswrapper[4753]: I1205 17:47:48.150319 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" Dec 05 17:47:48 crc kubenswrapper[4753]: I1205 17:47:48.711279 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5"] Dec 05 17:47:48 crc kubenswrapper[4753]: W1205 17:47:48.715509 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5678941d_59cd_487e_82a4_f2cf0bf528a7.slice/crio-1f4c89bfb8fff660e020e847414e07fa0bbbc183f86b865e2b1e59ec6d94d367 WatchSource:0}: Error finding container 1f4c89bfb8fff660e020e847414e07fa0bbbc183f86b865e2b1e59ec6d94d367: Status 404 returned error can't find the container with id 1f4c89bfb8fff660e020e847414e07fa0bbbc183f86b865e2b1e59ec6d94d367 Dec 05 17:47:48 crc kubenswrapper[4753]: I1205 17:47:48.758978 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" event={"ID":"5678941d-59cd-487e-82a4-f2cf0bf528a7","Type":"ContainerStarted","Data":"1f4c89bfb8fff660e020e847414e07fa0bbbc183f86b865e2b1e59ec6d94d367"} Dec 05 17:47:49 crc kubenswrapper[4753]: I1205 17:47:49.828711 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" event={"ID":"5678941d-59cd-487e-82a4-f2cf0bf528a7","Type":"ContainerStarted","Data":"536643605b9875db45544b905da29919e4c08faccb806dce63f7d0b7d8d9c59e"} Dec 05 17:47:57 crc kubenswrapper[4753]: I1205 17:47:57.721935 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:47:57 crc kubenswrapper[4753]: E1205 17:47:57.723082 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:48:09 crc kubenswrapper[4753]: I1205 17:48:09.720730 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:48:09 crc kubenswrapper[4753]: E1205 17:48:09.721455 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:48:20 crc kubenswrapper[4753]: I1205 17:48:20.720961 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:48:20 crc kubenswrapper[4753]: E1205 17:48:20.721891 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:48:30 crc kubenswrapper[4753]: I1205 17:48:30.294237 4753 generic.go:334] "Generic (PLEG): container finished" podID="5678941d-59cd-487e-82a4-f2cf0bf528a7" containerID="536643605b9875db45544b905da29919e4c08faccb806dce63f7d0b7d8d9c59e" exitCode=0 Dec 05 17:48:30 crc kubenswrapper[4753]: I1205 17:48:30.294324 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" event={"ID":"5678941d-59cd-487e-82a4-f2cf0bf528a7","Type":"ContainerDied","Data":"536643605b9875db45544b905da29919e4c08faccb806dce63f7d0b7d8d9c59e"} Dec 05 17:48:31 crc kubenswrapper[4753]: I1205 17:48:31.840882 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" Dec 05 17:48:31 crc kubenswrapper[4753]: I1205 17:48:31.940109 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhhnp\" (UniqueName: \"kubernetes.io/projected/5678941d-59cd-487e-82a4-f2cf0bf528a7-kube-api-access-fhhnp\") pod \"5678941d-59cd-487e-82a4-f2cf0bf528a7\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " Dec 05 17:48:31 crc kubenswrapper[4753]: I1205 17:48:31.940172 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-ssh-key\") pod \"5678941d-59cd-487e-82a4-f2cf0bf528a7\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " Dec 05 17:48:31 crc kubenswrapper[4753]: I1205 17:48:31.940249 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-inventory\") pod \"5678941d-59cd-487e-82a4-f2cf0bf528a7\" (UID: \"5678941d-59cd-487e-82a4-f2cf0bf528a7\") " Dec 05 17:48:31 crc kubenswrapper[4753]: I1205 17:48:31.946496 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5678941d-59cd-487e-82a4-f2cf0bf528a7-kube-api-access-fhhnp" (OuterVolumeSpecName: "kube-api-access-fhhnp") pod "5678941d-59cd-487e-82a4-f2cf0bf528a7" (UID: "5678941d-59cd-487e-82a4-f2cf0bf528a7"). InnerVolumeSpecName "kube-api-access-fhhnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:48:31 crc kubenswrapper[4753]: I1205 17:48:31.968452 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5678941d-59cd-487e-82a4-f2cf0bf528a7" (UID: "5678941d-59cd-487e-82a4-f2cf0bf528a7"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:48:31 crc kubenswrapper[4753]: I1205 17:48:31.970924 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-inventory" (OuterVolumeSpecName: "inventory") pod "5678941d-59cd-487e-82a4-f2cf0bf528a7" (UID: "5678941d-59cd-487e-82a4-f2cf0bf528a7"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.042880 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhhnp\" (UniqueName: \"kubernetes.io/projected/5678941d-59cd-487e-82a4-f2cf0bf528a7-kube-api-access-fhhnp\") on node \"crc\" DevicePath \"\"" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.042908 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.042919 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5678941d-59cd-487e-82a4-f2cf0bf528a7-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.319176 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" event={"ID":"5678941d-59cd-487e-82a4-f2cf0bf528a7","Type":"ContainerDied","Data":"1f4c89bfb8fff660e020e847414e07fa0bbbc183f86b865e2b1e59ec6d94d367"} Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.319616 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1f4c89bfb8fff660e020e847414e07fa0bbbc183f86b865e2b1e59ec6d94d367" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.319273 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7gbc5" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.419241 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx"] Dec 05 17:48:32 crc kubenswrapper[4753]: E1205 17:48:32.419767 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5678941d-59cd-487e-82a4-f2cf0bf528a7" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.419788 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="5678941d-59cd-487e-82a4-f2cf0bf528a7" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.420074 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="5678941d-59cd-487e-82a4-f2cf0bf528a7" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.421026 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.423932 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.423941 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.424263 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.425387 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.437628 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx"] Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.555042 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7dbg\" (UniqueName: \"kubernetes.io/projected/476af6d7-20f2-4345-96ad-219ab22e904b-kube-api-access-h7dbg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.555197 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.555267 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.657436 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7dbg\" (UniqueName: \"kubernetes.io/projected/476af6d7-20f2-4345-96ad-219ab22e904b-kube-api-access-h7dbg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.657523 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.657554 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx\" 
(UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.661920 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.662053 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.673724 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7dbg\" (UniqueName: \"kubernetes.io/projected/476af6d7-20f2-4345-96ad-219ab22e904b-kube-api-access-h7dbg\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.720613 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:48:32 crc kubenswrapper[4753]: E1205 17:48:32.721308 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:48:32 crc kubenswrapper[4753]: I1205 17:48:32.741925 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:48:33 crc kubenswrapper[4753]: I1205 17:48:33.315774 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx"] Dec 05 17:48:33 crc kubenswrapper[4753]: I1205 17:48:33.319168 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:48:33 crc kubenswrapper[4753]: I1205 17:48:33.339782 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" event={"ID":"476af6d7-20f2-4345-96ad-219ab22e904b","Type":"ContainerStarted","Data":"adb987c215f7d6f87bc9a770058175f960cd661bafd18541a9ae37cb25a4fa1b"} Dec 05 17:48:34 crc kubenswrapper[4753]: I1205 17:48:34.357395 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" event={"ID":"476af6d7-20f2-4345-96ad-219ab22e904b","Type":"ContainerStarted","Data":"871dac2ccbb188ab0ef45db2eccda4c9ab800cb0ad0aa414bdaa68f5b050d2d6"} Dec 05 17:48:34 crc kubenswrapper[4753]: I1205 17:48:34.385728 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" podStartSLOduration=1.9780660270000001 podStartE2EDuration="2.385710808s" podCreationTimestamp="2025-12-05 17:48:32 +0000 UTC" firstStartedPulling="2025-12-05 17:48:33.318856812 +0000 UTC m=+2651.821963818" lastFinishedPulling="2025-12-05 17:48:33.726501593 +0000 UTC m=+2652.229608599" observedRunningTime="2025-12-05 17:48:34.379923034 +0000 UTC m=+2652.883030040" watchObservedRunningTime="2025-12-05 17:48:34.385710808 +0000 UTC m=+2652.888817814" Dec 05 17:48:39 crc kubenswrapper[4753]: I1205 17:48:39.055108 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-db-sync-9mzx8"] Dec 05 17:48:39 crc kubenswrapper[4753]: I1205 17:48:39.063509 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-db-sync-9mzx8"] Dec 05 17:48:39 crc kubenswrapper[4753]: I1205 17:48:39.732955 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d52c3870-a8b0-43f1-ac03-2e7c0015a5f7" path="/var/lib/kubelet/pods/d52c3870-a8b0-43f1-ac03-2e7c0015a5f7/volumes" Dec 05 17:48:44 crc kubenswrapper[4753]: I1205 17:48:44.722108 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:48:44 crc kubenswrapper[4753]: E1205 17:48:44.723801 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:48:45 crc kubenswrapper[4753]: I1205 17:48:45.040816 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cloudkitty-storageinit-sf7pd"] Dec 05 17:48:45 crc kubenswrapper[4753]: I1205 17:48:45.051229 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cloudkitty-storageinit-sf7pd"] Dec 05 17:48:45 crc kubenswrapper[4753]: I1205 17:48:45.762642 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="369dd235-0113-4868-9146-9dcc60c10f63" 
path="/var/lib/kubelet/pods/369dd235-0113-4868-9146-9dcc60c10f63/volumes" Dec 05 17:48:59 crc kubenswrapper[4753]: I1205 17:48:59.721331 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:48:59 crc kubenswrapper[4753]: E1205 17:48:59.722306 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:49:10 crc kubenswrapper[4753]: I1205 17:49:10.722340 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:49:10 crc kubenswrapper[4753]: E1205 17:49:10.724689 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:49:23 crc kubenswrapper[4753]: I1205 17:49:23.720838 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:49:23 crc kubenswrapper[4753]: E1205 17:49:23.721767 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:49:24 crc kubenswrapper[4753]: I1205 17:49:24.876295 4753 generic.go:334] "Generic (PLEG): container finished" podID="476af6d7-20f2-4345-96ad-219ab22e904b" containerID="871dac2ccbb188ab0ef45db2eccda4c9ab800cb0ad0aa414bdaa68f5b050d2d6" exitCode=0 Dec 05 17:49:24 crc kubenswrapper[4753]: I1205 17:49:24.876341 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" event={"ID":"476af6d7-20f2-4345-96ad-219ab22e904b","Type":"ContainerDied","Data":"871dac2ccbb188ab0ef45db2eccda4c9ab800cb0ad0aa414bdaa68f5b050d2d6"} Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.391310 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.505880 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-ssh-key\") pod \"476af6d7-20f2-4345-96ad-219ab22e904b\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.505926 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7dbg\" (UniqueName: \"kubernetes.io/projected/476af6d7-20f2-4345-96ad-219ab22e904b-kube-api-access-h7dbg\") pod \"476af6d7-20f2-4345-96ad-219ab22e904b\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.506043 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-inventory\") pod \"476af6d7-20f2-4345-96ad-219ab22e904b\" (UID: \"476af6d7-20f2-4345-96ad-219ab22e904b\") " Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.514014 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/476af6d7-20f2-4345-96ad-219ab22e904b-kube-api-access-h7dbg" (OuterVolumeSpecName: "kube-api-access-h7dbg") pod "476af6d7-20f2-4345-96ad-219ab22e904b" (UID: "476af6d7-20f2-4345-96ad-219ab22e904b"). InnerVolumeSpecName "kube-api-access-h7dbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.543193 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "476af6d7-20f2-4345-96ad-219ab22e904b" (UID: "476af6d7-20f2-4345-96ad-219ab22e904b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.545537 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-inventory" (OuterVolumeSpecName: "inventory") pod "476af6d7-20f2-4345-96ad-219ab22e904b" (UID: "476af6d7-20f2-4345-96ad-219ab22e904b"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.608556 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.608592 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7dbg\" (UniqueName: \"kubernetes.io/projected/476af6d7-20f2-4345-96ad-219ab22e904b-kube-api-access-h7dbg\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.608607 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/476af6d7-20f2-4345-96ad-219ab22e904b-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.902542 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" event={"ID":"476af6d7-20f2-4345-96ad-219ab22e904b","Type":"ContainerDied","Data":"adb987c215f7d6f87bc9a770058175f960cd661bafd18541a9ae37cb25a4fa1b"} Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.902591 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="adb987c215f7d6f87bc9a770058175f960cd661bafd18541a9ae37cb25a4fa1b" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.903097 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.994266 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-lwtd6"] Dec 05 17:49:26 crc kubenswrapper[4753]: E1205 17:49:26.994679 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="476af6d7-20f2-4345-96ad-219ab22e904b" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.994696 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="476af6d7-20f2-4345-96ad-219ab22e904b" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.994932 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="476af6d7-20f2-4345-96ad-219ab22e904b" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.995880 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.997766 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:49:26 crc kubenswrapper[4753]: I1205 17:49:26.999536 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.000121 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.001728 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.004381 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-lwtd6"] Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.122195 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-lwtd6\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.122348 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6bs9\" (UniqueName: \"kubernetes.io/projected/7e1f8581-434d-4c92-aa86-f76aa242a2e2-kube-api-access-z6bs9\") pod \"ssh-known-hosts-edpm-deployment-lwtd6\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.122455 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-lwtd6\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.225337 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-lwtd6\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.225412 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6bs9\" (UniqueName: \"kubernetes.io/projected/7e1f8581-434d-4c92-aa86-f76aa242a2e2-kube-api-access-z6bs9\") pod \"ssh-known-hosts-edpm-deployment-lwtd6\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.225455 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-lwtd6\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc 
kubenswrapper[4753]: I1205 17:49:27.230332 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-lwtd6\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.233215 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-lwtd6\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.249797 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6bs9\" (UniqueName: \"kubernetes.io/projected/7e1f8581-434d-4c92-aa86-f76aa242a2e2-kube-api-access-z6bs9\") pod \"ssh-known-hosts-edpm-deployment-lwtd6\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.320935 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:27 crc kubenswrapper[4753]: I1205 17:49:27.926538 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-lwtd6"] Dec 05 17:49:28 crc kubenswrapper[4753]: I1205 17:49:28.920125 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" event={"ID":"7e1f8581-434d-4c92-aa86-f76aa242a2e2","Type":"ContainerStarted","Data":"7ba9c709c09e964599f0b4487553d93f389c2f8ecf12a645bad40dbc940463e7"} Dec 05 17:49:28 crc kubenswrapper[4753]: I1205 17:49:28.920464 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" event={"ID":"7e1f8581-434d-4c92-aa86-f76aa242a2e2","Type":"ContainerStarted","Data":"ca7a397d81a57327cd6bf6f243754d50442f7516227c0bec8748f91ae1b3101e"} Dec 05 17:49:28 crc kubenswrapper[4753]: I1205 17:49:28.942395 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" podStartSLOduration=2.325640473 podStartE2EDuration="2.942374868s" podCreationTimestamp="2025-12-05 17:49:26 +0000 UTC" firstStartedPulling="2025-12-05 17:49:27.931051851 +0000 UTC m=+2706.434158867" lastFinishedPulling="2025-12-05 17:49:28.547786266 +0000 UTC m=+2707.050893262" observedRunningTime="2025-12-05 17:49:28.936368018 +0000 UTC m=+2707.439475024" watchObservedRunningTime="2025-12-05 17:49:28.942374868 +0000 UTC m=+2707.445481884" Dec 05 17:49:31 crc kubenswrapper[4753]: I1205 17:49:31.444519 4753 scope.go:117] "RemoveContainer" containerID="417d313f097ad56274390b453a4c4d43754be2e637523d7ab673cff2812db7f0" Dec 05 17:49:31 crc kubenswrapper[4753]: I1205 17:49:31.487230 4753 scope.go:117] "RemoveContainer" containerID="e44ccefb45d57b43018522f1a15e6bc64717eb635b58390e86a3a618e417f280" Dec 05 17:49:35 crc kubenswrapper[4753]: I1205 17:49:35.997717 4753 generic.go:334] "Generic (PLEG): container finished" podID="7e1f8581-434d-4c92-aa86-f76aa242a2e2" containerID="7ba9c709c09e964599f0b4487553d93f389c2f8ecf12a645bad40dbc940463e7" exitCode=0 Dec 05 17:49:35 crc kubenswrapper[4753]: I1205 17:49:35.997793 4753 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" event={"ID":"7e1f8581-434d-4c92-aa86-f76aa242a2e2","Type":"ContainerDied","Data":"7ba9c709c09e964599f0b4487553d93f389c2f8ecf12a645bad40dbc940463e7"} Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.627764 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.723941 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8" Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.765147 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6bs9\" (UniqueName: \"kubernetes.io/projected/7e1f8581-434d-4c92-aa86-f76aa242a2e2-kube-api-access-z6bs9\") pod \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.765836 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-ssh-key-openstack-edpm-ipam\") pod \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.765983 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-inventory-0\") pod \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\" (UID: \"7e1f8581-434d-4c92-aa86-f76aa242a2e2\") " Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.771725 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e1f8581-434d-4c92-aa86-f76aa242a2e2-kube-api-access-z6bs9" (OuterVolumeSpecName: "kube-api-access-z6bs9") pod "7e1f8581-434d-4c92-aa86-f76aa242a2e2" (UID: "7e1f8581-434d-4c92-aa86-f76aa242a2e2"). InnerVolumeSpecName "kube-api-access-z6bs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.802830 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "7e1f8581-434d-4c92-aa86-f76aa242a2e2" (UID: "7e1f8581-434d-4c92-aa86-f76aa242a2e2"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.817922 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "7e1f8581-434d-4c92-aa86-f76aa242a2e2" (UID: "7e1f8581-434d-4c92-aa86-f76aa242a2e2"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.869694 4753 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.869734 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6bs9\" (UniqueName: \"kubernetes.io/projected/7e1f8581-434d-4c92-aa86-f76aa242a2e2-kube-api-access-z6bs9\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:37 crc kubenswrapper[4753]: I1205 17:49:37.869751 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/7e1f8581-434d-4c92-aa86-f76aa242a2e2-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.023933 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" event={"ID":"7e1f8581-434d-4c92-aa86-f76aa242a2e2","Type":"ContainerDied","Data":"ca7a397d81a57327cd6bf6f243754d50442f7516227c0bec8748f91ae1b3101e"} Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.023997 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca7a397d81a57327cd6bf6f243754d50442f7516227c0bec8748f91ae1b3101e" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.024078 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-lwtd6" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.108670 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw"] Dec 05 17:49:38 crc kubenswrapper[4753]: E1205 17:49:38.109093 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e1f8581-434d-4c92-aa86-f76aa242a2e2" containerName="ssh-known-hosts-edpm-deployment" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.109110 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e1f8581-434d-4c92-aa86-f76aa242a2e2" containerName="ssh-known-hosts-edpm-deployment" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.109372 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e1f8581-434d-4c92-aa86-f76aa242a2e2" containerName="ssh-known-hosts-edpm-deployment" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.110097 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.112231 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.113392 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.113508 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.116673 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.131326 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw"] Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.276679 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6z7rw\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.277127 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6z7rw\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.277232 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzs4d\" (UniqueName: \"kubernetes.io/projected/21886288-cce7-4e89-8c64-e4f06623f8f3-kube-api-access-zzs4d\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6z7rw\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.378841 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6z7rw\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.379121 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6z7rw\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.379201 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzs4d\" (UniqueName: \"kubernetes.io/projected/21886288-cce7-4e89-8c64-e4f06623f8f3-kube-api-access-zzs4d\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6z7rw\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.383589 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6z7rw\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.383675 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6z7rw\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.402044 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzs4d\" (UniqueName: \"kubernetes.io/projected/21886288-cce7-4e89-8c64-e4f06623f8f3-kube-api-access-zzs4d\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-6z7rw\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:38 crc kubenswrapper[4753]: I1205 17:49:38.431410 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:39 crc kubenswrapper[4753]: I1205 17:49:39.045257 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"5828e66b356a3207c12e78ba1c90384790225a61c83c3ec4cd9142584bace274"} Dec 05 17:49:39 crc kubenswrapper[4753]: I1205 17:49:39.055774 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw"] Dec 05 17:49:40 crc kubenswrapper[4753]: I1205 17:49:40.060796 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" event={"ID":"21886288-cce7-4e89-8c64-e4f06623f8f3","Type":"ContainerStarted","Data":"c9af1ba8c36157fc29d818cd02341c354dc895c9a526b4ce46ea85d7dbe1d37a"} Dec 05 17:49:40 crc kubenswrapper[4753]: I1205 17:49:40.061639 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" event={"ID":"21886288-cce7-4e89-8c64-e4f06623f8f3","Type":"ContainerStarted","Data":"fc3c0bbab4dc739e982297e1082981b4b353cb10d2837f8c5672f5270b0df874"} Dec 05 17:49:40 crc kubenswrapper[4753]: I1205 17:49:40.100670 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" podStartSLOduration=1.691673149 podStartE2EDuration="2.100643908s" podCreationTimestamp="2025-12-05 17:49:38 +0000 UTC" firstStartedPulling="2025-12-05 17:49:39.031127845 +0000 UTC m=+2717.534234861" lastFinishedPulling="2025-12-05 17:49:39.440098604 +0000 UTC m=+2717.943205620" observedRunningTime="2025-12-05 17:49:40.089819722 +0000 UTC m=+2718.592926728" watchObservedRunningTime="2025-12-05 17:49:40.100643908 +0000 UTC m=+2718.603750954" Dec 05 17:49:48 crc kubenswrapper[4753]: I1205 17:49:48.139915 4753 generic.go:334] "Generic (PLEG): container finished" podID="21886288-cce7-4e89-8c64-e4f06623f8f3" 
containerID="c9af1ba8c36157fc29d818cd02341c354dc895c9a526b4ce46ea85d7dbe1d37a" exitCode=0 Dec 05 17:49:48 crc kubenswrapper[4753]: I1205 17:49:48.140022 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" event={"ID":"21886288-cce7-4e89-8c64-e4f06623f8f3","Type":"ContainerDied","Data":"c9af1ba8c36157fc29d818cd02341c354dc895c9a526b4ce46ea85d7dbe1d37a"} Dec 05 17:49:49 crc kubenswrapper[4753]: I1205 17:49:49.788365 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:49 crc kubenswrapper[4753]: I1205 17:49:49.962917 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-inventory\") pod \"21886288-cce7-4e89-8c64-e4f06623f8f3\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " Dec 05 17:49:49 crc kubenswrapper[4753]: I1205 17:49:49.963052 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzs4d\" (UniqueName: \"kubernetes.io/projected/21886288-cce7-4e89-8c64-e4f06623f8f3-kube-api-access-zzs4d\") pod \"21886288-cce7-4e89-8c64-e4f06623f8f3\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " Dec 05 17:49:49 crc kubenswrapper[4753]: I1205 17:49:49.963366 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-ssh-key\") pod \"21886288-cce7-4e89-8c64-e4f06623f8f3\" (UID: \"21886288-cce7-4e89-8c64-e4f06623f8f3\") " Dec 05 17:49:49 crc kubenswrapper[4753]: I1205 17:49:49.970246 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21886288-cce7-4e89-8c64-e4f06623f8f3-kube-api-access-zzs4d" (OuterVolumeSpecName: "kube-api-access-zzs4d") pod "21886288-cce7-4e89-8c64-e4f06623f8f3" (UID: "21886288-cce7-4e89-8c64-e4f06623f8f3"). InnerVolumeSpecName "kube-api-access-zzs4d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.008965 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "21886288-cce7-4e89-8c64-e4f06623f8f3" (UID: "21886288-cce7-4e89-8c64-e4f06623f8f3"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.009110 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-inventory" (OuterVolumeSpecName: "inventory") pod "21886288-cce7-4e89-8c64-e4f06623f8f3" (UID: "21886288-cce7-4e89-8c64-e4f06623f8f3"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.065859 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.065904 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21886288-cce7-4e89-8c64-e4f06623f8f3-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.065917 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzs4d\" (UniqueName: \"kubernetes.io/projected/21886288-cce7-4e89-8c64-e4f06623f8f3-kube-api-access-zzs4d\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.167802 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" event={"ID":"21886288-cce7-4e89-8c64-e4f06623f8f3","Type":"ContainerDied","Data":"fc3c0bbab4dc739e982297e1082981b4b353cb10d2837f8c5672f5270b0df874"} Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.168161 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc3c0bbab4dc739e982297e1082981b4b353cb10d2837f8c5672f5270b0df874" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.167892 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-6z7rw" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.242138 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp"] Dec 05 17:49:50 crc kubenswrapper[4753]: E1205 17:49:50.242654 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21886288-cce7-4e89-8c64-e4f06623f8f3" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.242672 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="21886288-cce7-4e89-8c64-e4f06623f8f3" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.242874 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="21886288-cce7-4e89-8c64-e4f06623f8f3" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.243645 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.245403 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.245683 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.245764 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.246318 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.255504 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp"] Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.369967 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.370020 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdn84\" (UniqueName: \"kubernetes.io/projected/87333285-bec9-4c68-b2f7-307fee899fe4-kube-api-access-jdn84\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.370046 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.471759 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.471805 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdn84\" (UniqueName: \"kubernetes.io/projected/87333285-bec9-4c68-b2f7-307fee899fe4-kube-api-access-jdn84\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.471829 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp\" (UID: 
\"87333285-bec9-4c68-b2f7-307fee899fe4\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.476905 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.488744 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.507652 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdn84\" (UniqueName: \"kubernetes.io/projected/87333285-bec9-4c68-b2f7-307fee899fe4-kube-api-access-jdn84\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:50 crc kubenswrapper[4753]: I1205 17:49:50.567580 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:49:51 crc kubenswrapper[4753]: I1205 17:49:51.170765 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp"] Dec 05 17:49:51 crc kubenswrapper[4753]: I1205 17:49:51.177904 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" event={"ID":"87333285-bec9-4c68-b2f7-307fee899fe4","Type":"ContainerStarted","Data":"ccc17f1a0530d198ca135a9018fabcd8251e8125fb9674577322d1b2fdbdd780"} Dec 05 17:49:52 crc kubenswrapper[4753]: I1205 17:49:52.190621 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" event={"ID":"87333285-bec9-4c68-b2f7-307fee899fe4","Type":"ContainerStarted","Data":"84b1bfd0cd19173b2cbdd9cc1472c15ed545c2246c915db71525e690d485925b"} Dec 05 17:49:52 crc kubenswrapper[4753]: I1205 17:49:52.219225 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" podStartSLOduration=1.7669050259999999 podStartE2EDuration="2.21920583s" podCreationTimestamp="2025-12-05 17:49:50 +0000 UTC" firstStartedPulling="2025-12-05 17:49:51.16251109 +0000 UTC m=+2729.665618136" lastFinishedPulling="2025-12-05 17:49:51.614811924 +0000 UTC m=+2730.117918940" observedRunningTime="2025-12-05 17:49:52.212599573 +0000 UTC m=+2730.715706599" watchObservedRunningTime="2025-12-05 17:49:52.21920583 +0000 UTC m=+2730.722312836" Dec 05 17:50:02 crc kubenswrapper[4753]: I1205 17:50:02.304008 4753 generic.go:334] "Generic (PLEG): container finished" podID="87333285-bec9-4c68-b2f7-307fee899fe4" containerID="84b1bfd0cd19173b2cbdd9cc1472c15ed545c2246c915db71525e690d485925b" exitCode=0 Dec 05 17:50:02 crc kubenswrapper[4753]: I1205 17:50:02.304169 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" 
event={"ID":"87333285-bec9-4c68-b2f7-307fee899fe4","Type":"ContainerDied","Data":"84b1bfd0cd19173b2cbdd9cc1472c15ed545c2246c915db71525e690d485925b"} Dec 05 17:50:03 crc kubenswrapper[4753]: I1205 17:50:03.964757 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.019453 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jdn84\" (UniqueName: \"kubernetes.io/projected/87333285-bec9-4c68-b2f7-307fee899fe4-kube-api-access-jdn84\") pod \"87333285-bec9-4c68-b2f7-307fee899fe4\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.019674 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-inventory\") pod \"87333285-bec9-4c68-b2f7-307fee899fe4\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.019767 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-ssh-key\") pod \"87333285-bec9-4c68-b2f7-307fee899fe4\" (UID: \"87333285-bec9-4c68-b2f7-307fee899fe4\") " Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.026765 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87333285-bec9-4c68-b2f7-307fee899fe4-kube-api-access-jdn84" (OuterVolumeSpecName: "kube-api-access-jdn84") pod "87333285-bec9-4c68-b2f7-307fee899fe4" (UID: "87333285-bec9-4c68-b2f7-307fee899fe4"). InnerVolumeSpecName "kube-api-access-jdn84". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.054693 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-inventory" (OuterVolumeSpecName: "inventory") pod "87333285-bec9-4c68-b2f7-307fee899fe4" (UID: "87333285-bec9-4c68-b2f7-307fee899fe4"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.055209 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "87333285-bec9-4c68-b2f7-307fee899fe4" (UID: "87333285-bec9-4c68-b2f7-307fee899fe4"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.120967 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.120996 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jdn84\" (UniqueName: \"kubernetes.io/projected/87333285-bec9-4c68-b2f7-307fee899fe4-kube-api-access-jdn84\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.121008 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/87333285-bec9-4c68-b2f7-307fee899fe4-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.331421 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" event={"ID":"87333285-bec9-4c68-b2f7-307fee899fe4","Type":"ContainerDied","Data":"ccc17f1a0530d198ca135a9018fabcd8251e8125fb9674577322d1b2fdbdd780"} Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.331476 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ccc17f1a0530d198ca135a9018fabcd8251e8125fb9674577322d1b2fdbdd780" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.331510 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.439824 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w"] Dec 05 17:50:04 crc kubenswrapper[4753]: E1205 17:50:04.440353 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87333285-bec9-4c68-b2f7-307fee899fe4" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.440375 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="87333285-bec9-4c68-b2f7-307fee899fe4" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.440703 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="87333285-bec9-4c68-b2f7-307fee899fe4" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.441621 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.443419 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.444668 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.444940 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.445216 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.445523 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.445850 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.446101 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.446586 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.473350 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w"] Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.529638 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.529727 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.529762 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.529885 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ww64\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-kube-api-access-5ww64\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.529929 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.529950 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.529970 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.530012 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.530048 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.530068 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.530123 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.530199 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.530291 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.530338 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631578 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631651 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631708 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631733 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631766 4753 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631799 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ww64\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-kube-api-access-5ww64\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631825 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631846 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631865 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631895 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631937 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.631965 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-telemetry-combined-ca-bundle\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.632026 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.632066 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.637379 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.637697 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.637752 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.638398 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.638431 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.639573 4753 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.640353 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.641127 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.641560 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.642121 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.643713 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.643850 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.649469 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.652775 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ww64\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-kube-api-access-5ww64\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-2c89w\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:04 crc kubenswrapper[4753]: I1205 17:50:04.761054 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:05 crc kubenswrapper[4753]: I1205 17:50:05.375844 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w"] Dec 05 17:50:05 crc kubenswrapper[4753]: W1205 17:50:05.385363 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3e9951b8_30f1_4aea_947e_d69fcb39bdcf.slice/crio-91bdb0c90508eb1d21a51a83eec8989fea1a3affa1f22060b8dc6a5d12487ee9 WatchSource:0}: Error finding container 91bdb0c90508eb1d21a51a83eec8989fea1a3affa1f22060b8dc6a5d12487ee9: Status 404 returned error can't find the container with id 91bdb0c90508eb1d21a51a83eec8989fea1a3affa1f22060b8dc6a5d12487ee9 Dec 05 17:50:06 crc kubenswrapper[4753]: I1205 17:50:06.354829 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" event={"ID":"3e9951b8-30f1-4aea-947e-d69fcb39bdcf","Type":"ContainerStarted","Data":"467f27adfb0e302d5a587f6b41017d169e03370deb72bf3e7ce6defe341c4f7d"} Dec 05 17:50:06 crc kubenswrapper[4753]: I1205 17:50:06.355246 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" event={"ID":"3e9951b8-30f1-4aea-947e-d69fcb39bdcf","Type":"ContainerStarted","Data":"91bdb0c90508eb1d21a51a83eec8989fea1a3affa1f22060b8dc6a5d12487ee9"} Dec 05 17:50:06 crc kubenswrapper[4753]: I1205 17:50:06.381942 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" podStartSLOduration=1.9806832920000002 podStartE2EDuration="2.381920161s" podCreationTimestamp="2025-12-05 17:50:04 +0000 UTC" firstStartedPulling="2025-12-05 17:50:05.387988887 +0000 UTC m=+2743.891095893" lastFinishedPulling="2025-12-05 17:50:05.789225716 +0000 UTC m=+2744.292332762" observedRunningTime="2025-12-05 17:50:06.378268177 +0000 UTC m=+2744.881375183" watchObservedRunningTime="2025-12-05 17:50:06.381920161 +0000 UTC m=+2744.885027167" Dec 05 17:50:44 crc kubenswrapper[4753]: I1205 17:50:44.814247 4753 generic.go:334] "Generic (PLEG): container finished" podID="3e9951b8-30f1-4aea-947e-d69fcb39bdcf" containerID="467f27adfb0e302d5a587f6b41017d169e03370deb72bf3e7ce6defe341c4f7d" exitCode=0 Dec 05 17:50:44 crc kubenswrapper[4753]: I1205 17:50:44.814364 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" event={"ID":"3e9951b8-30f1-4aea-947e-d69fcb39bdcf","Type":"ContainerDied","Data":"467f27adfb0e302d5a587f6b41017d169e03370deb72bf3e7ce6defe341c4f7d"} Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.396884 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.439358 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-repo-setup-combined-ca-bundle\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440227 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5ww64\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-kube-api-access-5ww64\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440286 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440332 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ovn-combined-ca-bundle\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440372 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-inventory\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440419 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440501 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-neutron-metadata-combined-ca-bundle\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440531 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-libvirt-combined-ca-bundle\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440564 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: 
\"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440596 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440628 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-telemetry-combined-ca-bundle\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440675 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-bootstrap-combined-ca-bundle\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440698 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-nova-combined-ca-bundle\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.440738 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ssh-key\") pod \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\" (UID: \"3e9951b8-30f1-4aea-947e-d69fcb39bdcf\") " Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.446504 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-kube-api-access-5ww64" (OuterVolumeSpecName: "kube-api-access-5ww64") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "kube-api-access-5ww64". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.447253 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.447790 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.448671 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.449102 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.449746 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.450535 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.451377 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.451526 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.452000 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). 
InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.452090 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.452288 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.481729 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.483969 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-inventory" (OuterVolumeSpecName: "inventory") pod "3e9951b8-30f1-4aea-947e-d69fcb39bdcf" (UID: "3e9951b8-30f1-4aea-947e-d69fcb39bdcf"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544746 4753 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544806 4753 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544827 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544847 4753 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544873 4753 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544897 4753 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544925 4753 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544948 4753 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544967 4753 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.544986 4753 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.545003 4753 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.545024 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.545042 4753 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.545062 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5ww64\" (UniqueName: \"kubernetes.io/projected/3e9951b8-30f1-4aea-947e-d69fcb39bdcf-kube-api-access-5ww64\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.843629 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" event={"ID":"3e9951b8-30f1-4aea-947e-d69fcb39bdcf","Type":"ContainerDied","Data":"91bdb0c90508eb1d21a51a83eec8989fea1a3affa1f22060b8dc6a5d12487ee9"} Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.844438 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91bdb0c90508eb1d21a51a83eec8989fea1a3affa1f22060b8dc6a5d12487ee9" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.843731 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-2c89w" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.976266 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"] Dec 05 17:50:46 crc kubenswrapper[4753]: E1205 17:50:46.976869 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3e9951b8-30f1-4aea-947e-d69fcb39bdcf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.976898 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3e9951b8-30f1-4aea-947e-d69fcb39bdcf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.977296 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="3e9951b8-30f1-4aea-947e-d69fcb39bdcf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.978342 4753 util.go:30] "No sandbox for pod can be found. 
Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.980857 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.981004 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.981195 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p"
Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.981577 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.982054 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 17:50:46 crc kubenswrapper[4753]: I1205 17:50:46.987413 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"]
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.058801 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdhvt\" (UniqueName: \"kubernetes.io/projected/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-kube-api-access-zdhvt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.058845 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.059590 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.059735 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.059799 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.161922 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.162033 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.162205 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdhvt\" (UniqueName: \"kubernetes.io/projected/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-kube-api-access-zdhvt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.162248 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.162323 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.163681 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.167724 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.170837 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.171425 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"
" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.184980 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdhvt\" (UniqueName: \"kubernetes.io/projected/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-kube-api-access-zdhvt\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-kx88v\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.303778 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" Dec 05 17:50:47 crc kubenswrapper[4753]: I1205 17:50:47.858315 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v"] Dec 05 17:50:48 crc kubenswrapper[4753]: I1205 17:50:48.866030 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" event={"ID":"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71","Type":"ContainerStarted","Data":"67075ea2c036b43a94e47bc94ba85b1ce8967cf96ecc5469cc7915bfd40d9f4a"} Dec 05 17:50:48 crc kubenswrapper[4753]: I1205 17:50:48.866664 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" event={"ID":"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71","Type":"ContainerStarted","Data":"2f145e6cd20a40d93ba39235813d553fa615ed6e397df26f41b9a46203f4fcd8"} Dec 05 17:50:48 crc kubenswrapper[4753]: I1205 17:50:48.891368 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" podStartSLOduration=2.44632932 podStartE2EDuration="2.891348897s" podCreationTimestamp="2025-12-05 17:50:46 +0000 UTC" firstStartedPulling="2025-12-05 17:50:47.863786212 +0000 UTC m=+2786.366893238" lastFinishedPulling="2025-12-05 17:50:48.308805809 +0000 UTC m=+2786.811912815" observedRunningTime="2025-12-05 17:50:48.886763617 +0000 UTC m=+2787.389870633" watchObservedRunningTime="2025-12-05 17:50:48.891348897 +0000 UTC m=+2787.394455903" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.159488 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cvnpx"] Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.173290 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.195015 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cvnpx"] Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.226496 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-utilities\") pod \"redhat-operators-cvnpx\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") " pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.226606 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j874z\" (UniqueName: \"kubernetes.io/projected/419cf9c7-9e23-470a-bcbe-9094f639c3fc-kube-api-access-j874z\") pod \"redhat-operators-cvnpx\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") " pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.226725 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-catalog-content\") pod \"redhat-operators-cvnpx\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") " pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.328322 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-utilities\") pod \"redhat-operators-cvnpx\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") " pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.328643 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j874z\" (UniqueName: \"kubernetes.io/projected/419cf9c7-9e23-470a-bcbe-9094f639c3fc-kube-api-access-j874z\") pod \"redhat-operators-cvnpx\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") " pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.328709 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-catalog-content\") pod \"redhat-operators-cvnpx\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") " pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.328953 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-utilities\") pod \"redhat-operators-cvnpx\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") " pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.329135 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-catalog-content\") pod \"redhat-operators-cvnpx\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") " pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.367451 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-j874z\" (UniqueName: \"kubernetes.io/projected/419cf9c7-9e23-470a-bcbe-9094f639c3fc-kube-api-access-j874z\") pod \"redhat-operators-cvnpx\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") " pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.506253 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:50:58 crc kubenswrapper[4753]: I1205 17:50:58.984804 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cvnpx"] Dec 05 17:50:59 crc kubenswrapper[4753]: I1205 17:50:59.998227 4753 generic.go:334] "Generic (PLEG): container finished" podID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerID="8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce" exitCode=0 Dec 05 17:50:59 crc kubenswrapper[4753]: I1205 17:50:59.998322 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvnpx" event={"ID":"419cf9c7-9e23-470a-bcbe-9094f639c3fc","Type":"ContainerDied","Data":"8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce"} Dec 05 17:50:59 crc kubenswrapper[4753]: I1205 17:50:59.998565 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvnpx" event={"ID":"419cf9c7-9e23-470a-bcbe-9094f639c3fc","Type":"ContainerStarted","Data":"6271608f93ba4aa8ee6f21f09a69772dac2d689b1b0e95be52240ef6f56706d8"} Dec 05 17:51:02 crc kubenswrapper[4753]: I1205 17:51:02.024177 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvnpx" event={"ID":"419cf9c7-9e23-470a-bcbe-9094f639c3fc","Type":"ContainerStarted","Data":"6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98"} Dec 05 17:51:03 crc kubenswrapper[4753]: I1205 17:51:03.037378 4753 generic.go:334] "Generic (PLEG): container finished" podID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerID="6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98" exitCode=0 Dec 05 17:51:03 crc kubenswrapper[4753]: I1205 17:51:03.039320 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvnpx" event={"ID":"419cf9c7-9e23-470a-bcbe-9094f639c3fc","Type":"ContainerDied","Data":"6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98"} Dec 05 17:51:04 crc kubenswrapper[4753]: I1205 17:51:04.052892 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvnpx" event={"ID":"419cf9c7-9e23-470a-bcbe-9094f639c3fc","Type":"ContainerStarted","Data":"0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9"} Dec 05 17:51:04 crc kubenswrapper[4753]: I1205 17:51:04.089919 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cvnpx" podStartSLOduration=2.650292876 podStartE2EDuration="6.089879797s" podCreationTimestamp="2025-12-05 17:50:58 +0000 UTC" firstStartedPulling="2025-12-05 17:51:00.00133598 +0000 UTC m=+2798.504442986" lastFinishedPulling="2025-12-05 17:51:03.440922901 +0000 UTC m=+2801.944029907" observedRunningTime="2025-12-05 17:51:04.070789487 +0000 UTC m=+2802.573896493" watchObservedRunningTime="2025-12-05 17:51:04.089879797 +0000 UTC m=+2802.592986833" Dec 05 17:51:05 crc kubenswrapper[4753]: I1205 17:51:05.881136 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ngsxv"] Dec 05 17:51:05 crc 
kubenswrapper[4753]: I1205 17:51:05.883820 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:05 crc kubenswrapper[4753]: I1205 17:51:05.901433 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ngsxv"] Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.020707 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-utilities\") pod \"community-operators-ngsxv\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.020804 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-catalog-content\") pod \"community-operators-ngsxv\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.020967 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxm6m\" (UniqueName: \"kubernetes.io/projected/a773c219-837b-4f2a-a595-3def4881351e-kube-api-access-bxm6m\") pod \"community-operators-ngsxv\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.122741 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxm6m\" (UniqueName: \"kubernetes.io/projected/a773c219-837b-4f2a-a595-3def4881351e-kube-api-access-bxm6m\") pod \"community-operators-ngsxv\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.122822 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-utilities\") pod \"community-operators-ngsxv\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.122896 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-catalog-content\") pod \"community-operators-ngsxv\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.123558 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-catalog-content\") pod \"community-operators-ngsxv\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.123661 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-utilities\") pod \"community-operators-ngsxv\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " pod="openshift-marketplace/community-operators-ngsxv" Dec 
Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.145696 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxm6m\" (UniqueName: \"kubernetes.io/projected/a773c219-837b-4f2a-a595-3def4881351e-kube-api-access-bxm6m\") pod \"community-operators-ngsxv\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " pod="openshift-marketplace/community-operators-ngsxv"
Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.202683 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ngsxv"
Dec 05 17:51:06 crc kubenswrapper[4753]: I1205 17:51:06.853401 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ngsxv"]
Dec 05 17:51:06 crc kubenswrapper[4753]: W1205 17:51:06.854178 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda773c219_837b_4f2a_a595_3def4881351e.slice/crio-0a2a2a7d9ce708a33f8dad995eb0a3e8ad40c8147e27399896bc1a4050181bfc WatchSource:0}: Error finding container 0a2a2a7d9ce708a33f8dad995eb0a3e8ad40c8147e27399896bc1a4050181bfc: Status 404 returned error can't find the container with id 0a2a2a7d9ce708a33f8dad995eb0a3e8ad40c8147e27399896bc1a4050181bfc
Dec 05 17:51:07 crc kubenswrapper[4753]: I1205 17:51:07.079715 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ngsxv" event={"ID":"a773c219-837b-4f2a-a595-3def4881351e","Type":"ContainerStarted","Data":"cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616"}
Dec 05 17:51:07 crc kubenswrapper[4753]: I1205 17:51:07.080082 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ngsxv" event={"ID":"a773c219-837b-4f2a-a595-3def4881351e","Type":"ContainerStarted","Data":"0a2a2a7d9ce708a33f8dad995eb0a3e8ad40c8147e27399896bc1a4050181bfc"}
Dec 05 17:51:08 crc kubenswrapper[4753]: I1205 17:51:08.088765 4753 generic.go:334] "Generic (PLEG): container finished" podID="a773c219-837b-4f2a-a595-3def4881351e" containerID="cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616" exitCode=0
Dec 05 17:51:08 crc kubenswrapper[4753]: I1205 17:51:08.088806 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ngsxv" event={"ID":"a773c219-837b-4f2a-a595-3def4881351e","Type":"ContainerDied","Data":"cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616"}
Dec 05 17:51:08 crc kubenswrapper[4753]: I1205 17:51:08.506686 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cvnpx"
Dec 05 17:51:08 crc kubenswrapper[4753]: I1205 17:51:08.506988 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cvnpx"
Dec 05 17:51:09 crc kubenswrapper[4753]: I1205 17:51:09.099138 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ngsxv" event={"ID":"a773c219-837b-4f2a-a595-3def4881351e","Type":"ContainerStarted","Data":"bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1"}
Dec 05 17:51:09 crc kubenswrapper[4753]: I1205 17:51:09.557698 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cvnpx" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerName="registry-server" probeResult="failure" output=<
Dec 05 17:51:09 crc kubenswrapper[4753]: timeout: failed to connect service \":50051\" within 1s
Dec 05 17:51:09 crc kubenswrapper[4753]: >
Dec 05 17:51:11 crc kubenswrapper[4753]: I1205 17:51:11.123194 4753 generic.go:334] "Generic (PLEG): container finished" podID="a773c219-837b-4f2a-a595-3def4881351e" containerID="bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1" exitCode=0
Dec 05 17:51:11 crc kubenswrapper[4753]: I1205 17:51:11.123286 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ngsxv" event={"ID":"a773c219-837b-4f2a-a595-3def4881351e","Type":"ContainerDied","Data":"bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1"}
Dec 05 17:51:12 crc kubenswrapper[4753]: I1205 17:51:12.132384 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ngsxv" event={"ID":"a773c219-837b-4f2a-a595-3def4881351e","Type":"ContainerStarted","Data":"cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9"}
Dec 05 17:51:12 crc kubenswrapper[4753]: I1205 17:51:12.165355 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ngsxv" podStartSLOduration=3.699456091 podStartE2EDuration="7.165333416s" podCreationTimestamp="2025-12-05 17:51:05 +0000 UTC" firstStartedPulling="2025-12-05 17:51:08.091067083 +0000 UTC m=+2806.594174079" lastFinishedPulling="2025-12-05 17:51:11.556944398 +0000 UTC m=+2810.060051404" observedRunningTime="2025-12-05 17:51:12.157444853 +0000 UTC m=+2810.660551859" watchObservedRunningTime="2025-12-05 17:51:12.165333416 +0000 UTC m=+2810.668440422"
Dec 05 17:51:16 crc kubenswrapper[4753]: I1205 17:51:16.202832 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ngsxv"
Dec 05 17:51:16 crc kubenswrapper[4753]: I1205 17:51:16.203106 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ngsxv"
Dec 05 17:51:16 crc kubenswrapper[4753]: I1205 17:51:16.247837 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ngsxv"
Dec 05 17:51:17 crc kubenswrapper[4753]: I1205 17:51:17.224838 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ngsxv"
Dec 05 17:51:17 crc kubenswrapper[4753]: I1205 17:51:17.278266 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ngsxv"]
Dec 05 17:51:18 crc kubenswrapper[4753]: I1205 17:51:18.562421 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cvnpx"
Dec 05 17:51:18 crc kubenswrapper[4753]: I1205 17:51:18.622565 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cvnpx"
Dec 05 17:51:18 crc kubenswrapper[4753]: I1205 17:51:18.886949 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cvnpx"]
Dec 05 17:51:19 crc kubenswrapper[4753]: I1205 17:51:19.201045 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ngsxv" podUID="a773c219-837b-4f2a-a595-3def4881351e" containerName="registry-server" containerID="cri-o://cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9" gracePeriod=2
Dec 05 17:51:19 crc kubenswrapper[4753]: I1205 17:51:19.970032 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ngsxv"
"No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.046034 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-catalog-content\") pod \"a773c219-837b-4f2a-a595-3def4881351e\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.046112 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-utilities\") pod \"a773c219-837b-4f2a-a595-3def4881351e\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.046257 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bxm6m\" (UniqueName: \"kubernetes.io/projected/a773c219-837b-4f2a-a595-3def4881351e-kube-api-access-bxm6m\") pod \"a773c219-837b-4f2a-a595-3def4881351e\" (UID: \"a773c219-837b-4f2a-a595-3def4881351e\") " Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.047918 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-utilities" (OuterVolumeSpecName: "utilities") pod "a773c219-837b-4f2a-a595-3def4881351e" (UID: "a773c219-837b-4f2a-a595-3def4881351e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.053426 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a773c219-837b-4f2a-a595-3def4881351e-kube-api-access-bxm6m" (OuterVolumeSpecName: "kube-api-access-bxm6m") pod "a773c219-837b-4f2a-a595-3def4881351e" (UID: "a773c219-837b-4f2a-a595-3def4881351e"). InnerVolumeSpecName "kube-api-access-bxm6m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.093396 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a773c219-837b-4f2a-a595-3def4881351e" (UID: "a773c219-837b-4f2a-a595-3def4881351e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.148779 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.148811 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a773c219-837b-4f2a-a595-3def4881351e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.148821 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bxm6m\" (UniqueName: \"kubernetes.io/projected/a773c219-837b-4f2a-a595-3def4881351e-kube-api-access-bxm6m\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.211366 4753 generic.go:334] "Generic (PLEG): container finished" podID="a773c219-837b-4f2a-a595-3def4881351e" containerID="cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9" exitCode=0 Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.211421 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ngsxv" event={"ID":"a773c219-837b-4f2a-a595-3def4881351e","Type":"ContainerDied","Data":"cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9"} Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.211444 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ngsxv" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.211477 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ngsxv" event={"ID":"a773c219-837b-4f2a-a595-3def4881351e","Type":"ContainerDied","Data":"0a2a2a7d9ce708a33f8dad995eb0a3e8ad40c8147e27399896bc1a4050181bfc"} Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.211497 4753 scope.go:117] "RemoveContainer" containerID="cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.211789 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cvnpx" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerName="registry-server" containerID="cri-o://0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9" gracePeriod=2 Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.236484 4753 scope.go:117] "RemoveContainer" containerID="bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.241444 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ngsxv"] Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.251053 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ngsxv"] Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.256387 4753 scope.go:117] "RemoveContainer" containerID="cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616" Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.425112 4753 scope.go:117] "RemoveContainer" containerID="cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9" Dec 05 17:51:20 crc kubenswrapper[4753]: E1205 17:51:20.425672 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc 
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.425707 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9"} err="failed to get container status \"cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9\": rpc error: code = NotFound desc = could not find container \"cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9\": container with ID starting with cefb308a6ad303b2a88613291c78f23492d243eb32f64cd4c90cd13c859802d9 not found: ID does not exist"
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.425730 4753 scope.go:117] "RemoveContainer" containerID="bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1"
Dec 05 17:51:20 crc kubenswrapper[4753]: E1205 17:51:20.426068 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1\": container with ID starting with bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1 not found: ID does not exist" containerID="bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1"
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.426117 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1"} err="failed to get container status \"bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1\": rpc error: code = NotFound desc = could not find container \"bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1\": container with ID starting with bfb7fee10a72503b83143ffa29bb88b99d758fd6a0845e2953f56285fe82edf1 not found: ID does not exist"
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.426167 4753 scope.go:117] "RemoveContainer" containerID="cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616"
Dec 05 17:51:20 crc kubenswrapper[4753]: E1205 17:51:20.426564 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616\": container with ID starting with cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616 not found: ID does not exist" containerID="cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616"
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.426639 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616"} err="failed to get container status \"cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616\": rpc error: code = NotFound desc = could not find container \"cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616\": container with ID starting with cbb8fcf4964ebb21d90480aea2431b6e1fad969325092cc745511b80d0d0b616 not found: ID does not exist"
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.813528 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cvnpx"
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.869620 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-catalog-content\") pod \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") "
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.869693 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-utilities\") pod \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") "
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.869773 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j874z\" (UniqueName: \"kubernetes.io/projected/419cf9c7-9e23-470a-bcbe-9094f639c3fc-kube-api-access-j874z\") pod \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\" (UID: \"419cf9c7-9e23-470a-bcbe-9094f639c3fc\") "
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.871963 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-utilities" (OuterVolumeSpecName: "utilities") pod "419cf9c7-9e23-470a-bcbe-9094f639c3fc" (UID: "419cf9c7-9e23-470a-bcbe-9094f639c3fc"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.873023 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.882968 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/419cf9c7-9e23-470a-bcbe-9094f639c3fc-kube-api-access-j874z" (OuterVolumeSpecName: "kube-api-access-j874z") pod "419cf9c7-9e23-470a-bcbe-9094f639c3fc" (UID: "419cf9c7-9e23-470a-bcbe-9094f639c3fc"). InnerVolumeSpecName "kube-api-access-j874z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:51:20 crc kubenswrapper[4753]: I1205 17:51:20.975398 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j874z\" (UniqueName: \"kubernetes.io/projected/419cf9c7-9e23-470a-bcbe-9094f639c3fc-kube-api-access-j874z\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.015662 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "419cf9c7-9e23-470a-bcbe-9094f639c3fc" (UID: "419cf9c7-9e23-470a-bcbe-9094f639c3fc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.078075 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/419cf9c7-9e23-470a-bcbe-9094f639c3fc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.226821 4753 generic.go:334] "Generic (PLEG): container finished" podID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerID="0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9" exitCode=0 Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.226873 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvnpx" event={"ID":"419cf9c7-9e23-470a-bcbe-9094f639c3fc","Type":"ContainerDied","Data":"0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9"} Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.226905 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cvnpx" event={"ID":"419cf9c7-9e23-470a-bcbe-9094f639c3fc","Type":"ContainerDied","Data":"6271608f93ba4aa8ee6f21f09a69772dac2d689b1b0e95be52240ef6f56706d8"} Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.226927 4753 scope.go:117] "RemoveContainer" containerID="0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.227421 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cvnpx" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.247215 4753 scope.go:117] "RemoveContainer" containerID="6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.276006 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cvnpx"] Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.285722 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cvnpx"] Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.286271 4753 scope.go:117] "RemoveContainer" containerID="8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.317429 4753 scope.go:117] "RemoveContainer" containerID="0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9" Dec 05 17:51:21 crc kubenswrapper[4753]: E1205 17:51:21.317815 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9\": container with ID starting with 0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9 not found: ID does not exist" containerID="0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.317847 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9"} err="failed to get container status \"0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9\": rpc error: code = NotFound desc = could not find container \"0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9\": container with ID starting with 0cde5b8f8c9fb7c97b79149c832a475b3b8febc080127fb439cf5f349c5b70e9 not found: ID does not exist" Dec 05 17:51:21 crc 
kubenswrapper[4753]: I1205 17:51:21.317870 4753 scope.go:117] "RemoveContainer" containerID="6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98" Dec 05 17:51:21 crc kubenswrapper[4753]: E1205 17:51:21.318131 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98\": container with ID starting with 6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98 not found: ID does not exist" containerID="6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.318184 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98"} err="failed to get container status \"6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98\": rpc error: code = NotFound desc = could not find container \"6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98\": container with ID starting with 6c64bad39a5325aaa1fcdc0e337db392f727f8008ceb236bd681edf28e9e7b98 not found: ID does not exist" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.318204 4753 scope.go:117] "RemoveContainer" containerID="8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce" Dec 05 17:51:21 crc kubenswrapper[4753]: E1205 17:51:21.318517 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce\": container with ID starting with 8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce not found: ID does not exist" containerID="8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.318537 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce"} err="failed to get container status \"8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce\": rpc error: code = NotFound desc = could not find container \"8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce\": container with ID starting with 8b99004559aa6f5bc6b7d3c3c4ddd07c01ce5db51022db2cf4363f48cdd3e4ce not found: ID does not exist" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.732753 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" path="/var/lib/kubelet/pods/419cf9c7-9e23-470a-bcbe-9094f639c3fc/volumes" Dec 05 17:51:21 crc kubenswrapper[4753]: I1205 17:51:21.733760 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a773c219-837b-4f2a-a595-3def4881351e" path="/var/lib/kubelet/pods/a773c219-837b-4f2a-a595-3def4881351e/volumes" Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.981592 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8hn5s"] Dec 05 17:51:29 crc kubenswrapper[4753]: E1205 17:51:29.982777 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerName="registry-server" Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.982791 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerName="registry-server" Dec 05 17:51:29 crc 
Dec 05 17:51:29 crc kubenswrapper[4753]: E1205 17:51:29.982806 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a773c219-837b-4f2a-a595-3def4881351e" containerName="registry-server"
Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.982814 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="a773c219-837b-4f2a-a595-3def4881351e" containerName="registry-server"
Dec 05 17:51:29 crc kubenswrapper[4753]: E1205 17:51:29.982831 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerName="extract-content"
Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.982839 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerName="extract-content"
Dec 05 17:51:29 crc kubenswrapper[4753]: E1205 17:51:29.982859 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a773c219-837b-4f2a-a595-3def4881351e" containerName="extract-content"
Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.982866 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="a773c219-837b-4f2a-a595-3def4881351e" containerName="extract-content"
Dec 05 17:51:29 crc kubenswrapper[4753]: E1205 17:51:29.982886 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a773c219-837b-4f2a-a595-3def4881351e" containerName="extract-utilities"
Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.982893 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="a773c219-837b-4f2a-a595-3def4881351e" containerName="extract-utilities"
Dec 05 17:51:29 crc kubenswrapper[4753]: E1205 17:51:29.982918 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerName="extract-utilities"
Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.982926 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerName="extract-utilities"
Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.983193 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="a773c219-837b-4f2a-a595-3def4881351e" containerName="registry-server"
Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.983210 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="419cf9c7-9e23-470a-bcbe-9094f639c3fc" containerName="registry-server"
Dec 05 17:51:29 crc kubenswrapper[4753]: I1205 17:51:29.985084 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8hn5s"
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.004009 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8hn5s"]
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.094441 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxqct\" (UniqueName: \"kubernetes.io/projected/b742d333-3e26-4c8c-bde6-357ad72ec798-kube-api-access-kxqct\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s"
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.094492 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-utilities\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s"
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.094524 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-catalog-content\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s"
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.196060 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxqct\" (UniqueName: \"kubernetes.io/projected/b742d333-3e26-4c8c-bde6-357ad72ec798-kube-api-access-kxqct\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s"
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.196112 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-utilities\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s"
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.196141 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-catalog-content\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s"
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.196664 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-catalog-content\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s"
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.196739 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-utilities\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s"
Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.217663 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxqct\" (UniqueName: \"kubernetes.io/projected/b742d333-3e26-4c8c-bde6-357ad72ec798-kube-api-access-kxqct\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s"
"MountVolume.SetUp succeeded for volume \"kube-api-access-kxqct\" (UniqueName: \"kubernetes.io/projected/b742d333-3e26-4c8c-bde6-357ad72ec798-kube-api-access-kxqct\") pod \"certified-operators-8hn5s\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " pod="openshift-marketplace/certified-operators-8hn5s" Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.326341 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8hn5s" Dec 05 17:51:30 crc kubenswrapper[4753]: I1205 17:51:30.884403 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8hn5s"] Dec 05 17:51:31 crc kubenswrapper[4753]: I1205 17:51:31.333648 4753 generic.go:334] "Generic (PLEG): container finished" podID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerID="a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f" exitCode=0 Dec 05 17:51:31 crc kubenswrapper[4753]: I1205 17:51:31.333720 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8hn5s" event={"ID":"b742d333-3e26-4c8c-bde6-357ad72ec798","Type":"ContainerDied","Data":"a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f"} Dec 05 17:51:31 crc kubenswrapper[4753]: I1205 17:51:31.333754 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8hn5s" event={"ID":"b742d333-3e26-4c8c-bde6-357ad72ec798","Type":"ContainerStarted","Data":"1d916ef48bc07b2c0ebf930fd305ad05c148a25c6503b52dcfadfee56b21ebfc"} Dec 05 17:51:32 crc kubenswrapper[4753]: I1205 17:51:32.345739 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8hn5s" event={"ID":"b742d333-3e26-4c8c-bde6-357ad72ec798","Type":"ContainerStarted","Data":"1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050"} Dec 05 17:51:33 crc kubenswrapper[4753]: E1205 17:51:33.129984 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb742d333_3e26_4c8c_bde6_357ad72ec798.slice/crio-1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:51:33 crc kubenswrapper[4753]: I1205 17:51:33.361610 4753 generic.go:334] "Generic (PLEG): container finished" podID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerID="1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050" exitCode=0 Dec 05 17:51:33 crc kubenswrapper[4753]: I1205 17:51:33.361649 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8hn5s" event={"ID":"b742d333-3e26-4c8c-bde6-357ad72ec798","Type":"ContainerDied","Data":"1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050"} Dec 05 17:51:34 crc kubenswrapper[4753]: I1205 17:51:34.378715 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8hn5s" event={"ID":"b742d333-3e26-4c8c-bde6-357ad72ec798","Type":"ContainerStarted","Data":"4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68"} Dec 05 17:51:34 crc kubenswrapper[4753]: I1205 17:51:34.417719 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8hn5s" podStartSLOduration=2.804819342 podStartE2EDuration="5.417696849s" podCreationTimestamp="2025-12-05 17:51:29 +0000 UTC" firstStartedPulling="2025-12-05 
17:51:31.337572966 +0000 UTC m=+2829.840680012" lastFinishedPulling="2025-12-05 17:51:33.950450503 +0000 UTC m=+2832.453557519" observedRunningTime="2025-12-05 17:51:34.410747202 +0000 UTC m=+2832.913854258" watchObservedRunningTime="2025-12-05 17:51:34.417696849 +0000 UTC m=+2832.920803865" Dec 05 17:51:40 crc kubenswrapper[4753]: I1205 17:51:40.328370 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8hn5s" Dec 05 17:51:40 crc kubenswrapper[4753]: I1205 17:51:40.329954 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8hn5s" Dec 05 17:51:40 crc kubenswrapper[4753]: I1205 17:51:40.401975 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8hn5s" Dec 05 17:51:40 crc kubenswrapper[4753]: I1205 17:51:40.485759 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8hn5s" Dec 05 17:51:40 crc kubenswrapper[4753]: I1205 17:51:40.640432 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8hn5s"] Dec 05 17:51:42 crc kubenswrapper[4753]: I1205 17:51:42.477532 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8hn5s" podUID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerName="registry-server" containerID="cri-o://4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68" gracePeriod=2 Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.084806 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8hn5s" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.226929 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxqct\" (UniqueName: \"kubernetes.io/projected/b742d333-3e26-4c8c-bde6-357ad72ec798-kube-api-access-kxqct\") pod \"b742d333-3e26-4c8c-bde6-357ad72ec798\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.227242 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-catalog-content\") pod \"b742d333-3e26-4c8c-bde6-357ad72ec798\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.227408 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-utilities\") pod \"b742d333-3e26-4c8c-bde6-357ad72ec798\" (UID: \"b742d333-3e26-4c8c-bde6-357ad72ec798\") " Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.228614 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-utilities" (OuterVolumeSpecName: "utilities") pod "b742d333-3e26-4c8c-bde6-357ad72ec798" (UID: "b742d333-3e26-4c8c-bde6-357ad72ec798"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.233463 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b742d333-3e26-4c8c-bde6-357ad72ec798-kube-api-access-kxqct" (OuterVolumeSpecName: "kube-api-access-kxqct") pod "b742d333-3e26-4c8c-bde6-357ad72ec798" (UID: "b742d333-3e26-4c8c-bde6-357ad72ec798"). InnerVolumeSpecName "kube-api-access-kxqct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.303795 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b742d333-3e26-4c8c-bde6-357ad72ec798" (UID: "b742d333-3e26-4c8c-bde6-357ad72ec798"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.329535 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxqct\" (UniqueName: \"kubernetes.io/projected/b742d333-3e26-4c8c-bde6-357ad72ec798-kube-api-access-kxqct\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.329569 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.329583 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b742d333-3e26-4c8c-bde6-357ad72ec798-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.513503 4753 generic.go:334] "Generic (PLEG): container finished" podID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerID="4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68" exitCode=0 Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.513572 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8hn5s" event={"ID":"b742d333-3e26-4c8c-bde6-357ad72ec798","Type":"ContainerDied","Data":"4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68"} Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.513589 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8hn5s" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.513610 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8hn5s" event={"ID":"b742d333-3e26-4c8c-bde6-357ad72ec798","Type":"ContainerDied","Data":"1d916ef48bc07b2c0ebf930fd305ad05c148a25c6503b52dcfadfee56b21ebfc"} Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.513662 4753 scope.go:117] "RemoveContainer" containerID="4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.564491 4753 scope.go:117] "RemoveContainer" containerID="1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.567622 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8hn5s"] Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.582480 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8hn5s"] Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.588405 4753 scope.go:117] "RemoveContainer" containerID="a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.650229 4753 scope.go:117] "RemoveContainer" containerID="4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68" Dec 05 17:51:43 crc kubenswrapper[4753]: E1205 17:51:43.650630 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68\": container with ID starting with 4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68 not found: ID does not exist" containerID="4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.650667 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68"} err="failed to get container status \"4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68\": rpc error: code = NotFound desc = could not find container \"4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68\": container with ID starting with 4ffb9a38b855c7a3e2b5bb3da59171c42328d18000accad6425cab67e7bb3a68 not found: ID does not exist" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.650693 4753 scope.go:117] "RemoveContainer" containerID="1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050" Dec 05 17:51:43 crc kubenswrapper[4753]: E1205 17:51:43.651049 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050\": container with ID starting with 1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050 not found: ID does not exist" containerID="1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.651112 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050"} err="failed to get container status \"1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050\": rpc error: code = NotFound desc = could not find 
container \"1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050\": container with ID starting with 1b335780e686b3f5ead5f2f55c082212b1d405aaf91a86411a831655d3458050 not found: ID does not exist" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.651138 4753 scope.go:117] "RemoveContainer" containerID="a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f" Dec 05 17:51:43 crc kubenswrapper[4753]: E1205 17:51:43.651633 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f\": container with ID starting with a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f not found: ID does not exist" containerID="a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.651684 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f"} err="failed to get container status \"a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f\": rpc error: code = NotFound desc = could not find container \"a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f\": container with ID starting with a2eac17f57a727fccc0c97bab50561d8350868a539197e25cc2e792a4c4e711f not found: ID does not exist" Dec 05 17:51:43 crc kubenswrapper[4753]: I1205 17:51:43.734726 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b742d333-3e26-4c8c-bde6-357ad72ec798" path="/var/lib/kubelet/pods/b742d333-3e26-4c8c-bde6-357ad72ec798/volumes" Dec 05 17:51:52 crc kubenswrapper[4753]: I1205 17:51:52.605442 4753 generic.go:334] "Generic (PLEG): container finished" podID="e2f8ca40-16d5-4a17-80a1-f5bf12f92d71" containerID="67075ea2c036b43a94e47bc94ba85b1ce8967cf96ecc5469cc7915bfd40d9f4a" exitCode=0 Dec 05 17:51:52 crc kubenswrapper[4753]: I1205 17:51:52.605520 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" event={"ID":"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71","Type":"ContainerDied","Data":"67075ea2c036b43a94e47bc94ba85b1ce8967cf96ecc5469cc7915bfd40d9f4a"} Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.152789 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.269795 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovncontroller-config-0\") pod \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.270018 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovn-combined-ca-bundle\") pod \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.270069 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ssh-key\") pod \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.270321 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-inventory\") pod \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.270435 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zdhvt\" (UniqueName: \"kubernetes.io/projected/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-kube-api-access-zdhvt\") pod \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\" (UID: \"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71\") " Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.275782 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-kube-api-access-zdhvt" (OuterVolumeSpecName: "kube-api-access-zdhvt") pod "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71" (UID: "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71"). InnerVolumeSpecName "kube-api-access-zdhvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.276795 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71" (UID: "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.300353 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71" (UID: "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.300385 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-inventory" (OuterVolumeSpecName: "inventory") pod "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71" (UID: "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71"). 
InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.307589 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71" (UID: "e2f8ca40-16d5-4a17-80a1-f5bf12f92d71"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.373036 4753 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.373064 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.373115 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.373124 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zdhvt\" (UniqueName: \"kubernetes.io/projected/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-kube-api-access-zdhvt\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.373132 4753 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e2f8ca40-16d5-4a17-80a1-f5bf12f92d71-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.636248 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" event={"ID":"e2f8ca40-16d5-4a17-80a1-f5bf12f92d71","Type":"ContainerDied","Data":"2f145e6cd20a40d93ba39235813d553fa615ed6e397df26f41b9a46203f4fcd8"} Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.636695 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f145e6cd20a40d93ba39235813d553fa615ed6e397df26f41b9a46203f4fcd8" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.636814 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-kx88v" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.776384 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"] Dec 05 17:51:54 crc kubenswrapper[4753]: E1205 17:51:54.777978 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerName="registry-server" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.778025 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerName="registry-server" Dec 05 17:51:54 crc kubenswrapper[4753]: E1205 17:51:54.778075 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2f8ca40-16d5-4a17-80a1-f5bf12f92d71" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.778083 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2f8ca40-16d5-4a17-80a1-f5bf12f92d71" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 17:51:54 crc kubenswrapper[4753]: E1205 17:51:54.778117 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerName="extract-content" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.778125 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerName="extract-content" Dec 05 17:51:54 crc kubenswrapper[4753]: E1205 17:51:54.778161 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerName="extract-utilities" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.778169 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerName="extract-utilities" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.778825 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="b742d333-3e26-4c8c-bde6-357ad72ec798" containerName="registry-server" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.778855 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2f8ca40-16d5-4a17-80a1-f5bf12f92d71" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.780021 4753 util.go:30] "No sandbox for pod can be found. 
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.782267 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.782423 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.782555 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.782679 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.784066 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.784458 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.804263 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"]
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.884931 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.884985 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.885053 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.885187 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62jd7\" (UniqueName: \"kubernetes.io/projected/ea5f795b-6ef0-4281-a619-1a89b547e436-kube-api-access-62jd7\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.885217 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.885260 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.987195 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.987253 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.987309 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.987391 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62jd7\" (UniqueName: \"kubernetes.io/projected/ea5f795b-6ef0-4281-a619-1a89b547e436-kube-api-access-62jd7\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.987443 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.987466 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.991930 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.991983 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.992534 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.992849 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:54 crc kubenswrapper[4753]: I1205 17:51:54.997799 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:55 crc kubenswrapper[4753]: I1205 17:51:55.012868 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62jd7\" (UniqueName: \"kubernetes.io/projected/ea5f795b-6ef0-4281-a619-1a89b547e436-kube-api-access-62jd7\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:55 crc kubenswrapper[4753]: I1205 17:51:55.117252 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:51:55 crc kubenswrapper[4753]: I1205 17:51:55.706824 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"]
Dec 05 17:51:56 crc kubenswrapper[4753]: I1205 17:51:56.660635 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf" event={"ID":"ea5f795b-6ef0-4281-a619-1a89b547e436","Type":"ContainerStarted","Data":"a3bedb6437ff8de6c43c7ce70478133f8003383e60a7fc18a8ab4f6fe9d933fb"}
Dec 05 17:51:56 crc kubenswrapper[4753]: I1205 17:51:56.661290 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf" event={"ID":"ea5f795b-6ef0-4281-a619-1a89b547e436","Type":"ContainerStarted","Data":"b9ff965b3bc3d7ec11bcf070a0e95fd961a75930f29d03a669cc754e9bc5031e"}
Dec 05 17:51:58 crc kubenswrapper[4753]: I1205 17:51:58.979466 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:51:58 crc kubenswrapper[4753]: I1205 17:51:58.979792 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:52:28 crc kubenswrapper[4753]: I1205 17:52:28.978653 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:52:28 crc kubenswrapper[4753]: I1205 17:52:28.979354 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:52:46 crc kubenswrapper[4753]: I1205 17:52:46.195955 4753 generic.go:334] "Generic (PLEG): container finished" podID="ea5f795b-6ef0-4281-a619-1a89b547e436" containerID="a3bedb6437ff8de6c43c7ce70478133f8003383e60a7fc18a8ab4f6fe9d933fb" exitCode=0
Dec 05 17:52:46 crc kubenswrapper[4753]: I1205 17:52:46.196075 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf" event={"ID":"ea5f795b-6ef0-4281-a619-1a89b547e436","Type":"ContainerDied","Data":"a3bedb6437ff8de6c43c7ce70478133f8003383e60a7fc18a8ab4f6fe9d933fb"}
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.751808 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.856796 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-inventory\") pod \"ea5f795b-6ef0-4281-a619-1a89b547e436\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") "
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.856833 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-metadata-combined-ca-bundle\") pod \"ea5f795b-6ef0-4281-a619-1a89b547e436\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") "
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.856907 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62jd7\" (UniqueName: \"kubernetes.io/projected/ea5f795b-6ef0-4281-a619-1a89b547e436-kube-api-access-62jd7\") pod \"ea5f795b-6ef0-4281-a619-1a89b547e436\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") "
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.856931 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-ovn-metadata-agent-neutron-config-0\") pod \"ea5f795b-6ef0-4281-a619-1a89b547e436\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") "
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.856972 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-ssh-key\") pod \"ea5f795b-6ef0-4281-a619-1a89b547e436\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") "
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.856987 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-nova-metadata-neutron-config-0\") pod \"ea5f795b-6ef0-4281-a619-1a89b547e436\" (UID: \"ea5f795b-6ef0-4281-a619-1a89b547e436\") "
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.867939 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea5f795b-6ef0-4281-a619-1a89b547e436-kube-api-access-62jd7" (OuterVolumeSpecName: "kube-api-access-62jd7") pod "ea5f795b-6ef0-4281-a619-1a89b547e436" (UID: "ea5f795b-6ef0-4281-a619-1a89b547e436"). InnerVolumeSpecName "kube-api-access-62jd7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.868195 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "ea5f795b-6ef0-4281-a619-1a89b547e436" (UID: "ea5f795b-6ef0-4281-a619-1a89b547e436"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.887908 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-inventory" (OuterVolumeSpecName: "inventory") pod "ea5f795b-6ef0-4281-a619-1a89b547e436" (UID: "ea5f795b-6ef0-4281-a619-1a89b547e436"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.888318 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "ea5f795b-6ef0-4281-a619-1a89b547e436" (UID: "ea5f795b-6ef0-4281-a619-1a89b547e436"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.888378 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ea5f795b-6ef0-4281-a619-1a89b547e436" (UID: "ea5f795b-6ef0-4281-a619-1a89b547e436"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.898943 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "ea5f795b-6ef0-4281-a619-1a89b547e436" (UID: "ea5f795b-6ef0-4281-a619-1a89b547e436"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.958338 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.958371 4753 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.958384 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62jd7\" (UniqueName: \"kubernetes.io/projected/ea5f795b-6ef0-4281-a619-1a89b547e436-kube-api-access-62jd7\") on node \"crc\" DevicePath \"\""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.958396 4753 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.958406 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 17:52:47 crc kubenswrapper[4753]: I1205 17:52:47.958414 4753 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/ea5f795b-6ef0-4281-a619-1a89b547e436-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\""
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.220472 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf" event={"ID":"ea5f795b-6ef0-4281-a619-1a89b547e436","Type":"ContainerDied","Data":"b9ff965b3bc3d7ec11bcf070a0e95fd961a75930f29d03a669cc754e9bc5031e"}
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.220802 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9ff965b3bc3d7ec11bcf070a0e95fd961a75930f29d03a669cc754e9bc5031e"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.220720 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.304129 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"]
Dec 05 17:52:48 crc kubenswrapper[4753]: E1205 17:52:48.304716 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea5f795b-6ef0-4281-a619-1a89b547e436" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.304739 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea5f795b-6ef0-4281-a619-1a89b547e436" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.304997 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea5f795b-6ef0-4281-a619-1a89b547e436" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.305976 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.308121 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.308268 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.308812 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.309250 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.309250 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.321448 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"]
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.365188 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.365366 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.365730 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8ncn\" (UniqueName: \"kubernetes.io/projected/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-kube-api-access-d8ncn\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.365785 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.365891 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.467811 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.467860 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8ncn\" (UniqueName: \"kubernetes.io/projected/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-kube-api-access-d8ncn\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.467896 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.467922 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.468019 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.475604 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.475842 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.477025 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.491479 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.497244 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8ncn\" (UniqueName: \"kubernetes.io/projected/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-kube-api-access-d8ncn\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:48 crc kubenswrapper[4753]: I1205 17:52:48.640130 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:52:49 crc kubenswrapper[4753]: I1205 17:52:49.223479 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"]
Dec 05 17:52:50 crc kubenswrapper[4753]: I1205 17:52:50.245774 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z" event={"ID":"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5","Type":"ContainerStarted","Data":"2dc4ad7599ba3e0c8049fdeb6f75ff160e5d8e5c0725aaba5c6b2c5fbfa5e609"}
Dec 05 17:52:50 crc kubenswrapper[4753]: I1205 17:52:50.246252 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z" event={"ID":"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5","Type":"ContainerStarted","Data":"65dcab63a02eab33acb3ffc6ddeee94d06c581eb8c657bac5d69bf3fb2f314ef"}
Dec 05 17:52:50 crc kubenswrapper[4753]: I1205 17:52:50.274577 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z" podStartSLOduration=1.660644956 podStartE2EDuration="2.274559619s" podCreationTimestamp="2025-12-05 17:52:48 +0000 UTC" firstStartedPulling="2025-12-05 17:52:49.231412754 +0000 UTC m=+2907.734519770" lastFinishedPulling="2025-12-05 17:52:49.845327417 +0000 UTC m=+2908.348434433" observedRunningTime="2025-12-05 17:52:50.265550533 +0000 UTC m=+2908.768657539" watchObservedRunningTime="2025-12-05 17:52:50.274559619 +0000 UTC m=+2908.777666625"
Dec 05 17:52:58 crc kubenswrapper[4753]: I1205 17:52:58.979049 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:52:58 crc kubenswrapper[4753]: I1205 17:52:58.979622 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:52:58 crc kubenswrapper[4753]: I1205 17:52:58.979669 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68"
Dec 05 17:52:58 crc kubenswrapper[4753]: I1205 17:52:58.980467 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5828e66b356a3207c12e78ba1c90384790225a61c83c3ec4cd9142584bace274"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 17:52:58 crc kubenswrapper[4753]: I1205 17:52:58.980523 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://5828e66b356a3207c12e78ba1c90384790225a61c83c3ec4cd9142584bace274" gracePeriod=600
Dec 05 17:52:59 crc kubenswrapper[4753]: I1205 17:52:59.364492 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="5828e66b356a3207c12e78ba1c90384790225a61c83c3ec4cd9142584bace274" exitCode=0
Dec 05 17:52:59 crc kubenswrapper[4753]: I1205 17:52:59.364731 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"5828e66b356a3207c12e78ba1c90384790225a61c83c3ec4cd9142584bace274"}
Dec 05 17:52:59 crc kubenswrapper[4753]: I1205 17:52:59.364894 4753 scope.go:117] "RemoveContainer" containerID="1a837c74d8c8977a86c74839c544fee61e72d58cfc2be715d8c463cb4d09c5d8"
Dec 05 17:53:00 crc kubenswrapper[4753]: I1205 17:53:00.374815 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e"}
Dec 05 17:55:28 crc kubenswrapper[4753]: I1205 17:55:28.979217 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:55:28 crc kubenswrapper[4753]: I1205 17:55:28.979762 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:55:58 crc kubenswrapper[4753]: I1205 17:55:58.978832 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:55:58 crc kubenswrapper[4753]: I1205 17:55:58.979432 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:56:28 crc kubenswrapper[4753]: I1205 17:56:28.978957 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:56:28 crc kubenswrapper[4753]: I1205 17:56:28.979751 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:56:28 crc kubenswrapper[4753]: I1205 17:56:28.979841 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68"
Dec 05 17:56:28 crc kubenswrapper[4753]: I1205 17:56:28.981313 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 17:56:28 crc kubenswrapper[4753]: I1205 17:56:28.981464 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" gracePeriod=600
Dec 05 17:56:29 crc kubenswrapper[4753]: E1205 17:56:29.113915 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:56:29 crc kubenswrapper[4753]: I1205 17:56:29.713354 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" exitCode=0
Dec 05 17:56:29 crc kubenswrapper[4753]: I1205 17:56:29.713397 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e"}
Dec 05 17:56:29 crc kubenswrapper[4753]: I1205 17:56:29.713433 4753 scope.go:117] "RemoveContainer" containerID="5828e66b356a3207c12e78ba1c90384790225a61c83c3ec4cd9142584bace274"
Dec 05 17:56:29 crc kubenswrapper[4753]: I1205 17:56:29.714129 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e"
Dec 05 17:56:29 crc kubenswrapper[4753]: E1205 17:56:29.714512 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:56:41 crc kubenswrapper[4753]: I1205 17:56:41.726725 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e"
Dec 05 17:56:41 crc kubenswrapper[4753]: E1205 17:56:41.727552 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:56:54 crc kubenswrapper[4753]: I1205 17:56:54.721066 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e"
Dec 05 17:56:54 crc kubenswrapper[4753]: E1205 17:56:54.722099 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:57:08 crc kubenswrapper[4753]: I1205 17:57:08.720746 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e"
Dec 05 17:57:08 crc kubenswrapper[4753]: E1205 17:57:08.721779 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:57:23 crc kubenswrapper[4753]: I1205 17:57:23.720509 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e"
Dec 05 17:57:23 crc kubenswrapper[4753]: E1205 17:57:23.721396 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 17:57:25 crc kubenswrapper[4753]: I1205 17:57:25.341530 4753 generic.go:334] "Generic (PLEG): container finished" podID="d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5" containerID="2dc4ad7599ba3e0c8049fdeb6f75ff160e5d8e5c0725aaba5c6b2c5fbfa5e609" exitCode=0
Dec 05 17:57:25 crc kubenswrapper[4753]: I1205 17:57:25.341638 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z" event={"ID":"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5","Type":"ContainerDied","Data":"2dc4ad7599ba3e0c8049fdeb6f75ff160e5d8e5c0725aaba5c6b2c5fbfa5e609"}
Dec 05 17:57:26 crc kubenswrapper[4753]: I1205 17:57:26.899794 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z"
Dec 05 17:57:26 crc kubenswrapper[4753]: I1205 17:57:26.983141 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-ssh-key\") pod \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") "
Dec 05 17:57:26 crc kubenswrapper[4753]: I1205 17:57:26.983234 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-combined-ca-bundle\") pod \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") "
Dec 05 17:57:26 crc kubenswrapper[4753]: I1205 17:57:26.983332 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d8ncn\" (UniqueName: \"kubernetes.io/projected/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-kube-api-access-d8ncn\") pod \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") "
Dec 05 17:57:26 crc kubenswrapper[4753]: I1205 17:57:26.983394 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-inventory\") pod \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") "
Dec 05 17:57:26 crc kubenswrapper[4753]: I1205 17:57:26.983534 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-secret-0\") pod \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\" (UID: \"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5\") "
Dec 05 17:57:26 crc kubenswrapper[4753]: I1205 17:57:26.996954 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-kube-api-access-d8ncn" (OuterVolumeSpecName: "kube-api-access-d8ncn") pod "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5" (UID: "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5"). InnerVolumeSpecName "kube-api-access-d8ncn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:57:26 crc kubenswrapper[4753]: I1205 17:57:26.999411 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5" (UID: "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.020263 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-inventory" (OuterVolumeSpecName: "inventory") pod "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5" (UID: "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.037892 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5" (UID: "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5").
InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.048306 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5" (UID: "d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.086702 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.086752 4753 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.086769 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d8ncn\" (UniqueName: \"kubernetes.io/projected/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-kube-api-access-d8ncn\") on node \"crc\" DevicePath \"\"" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.086780 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.086793 4753 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.365816 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z" event={"ID":"d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5","Type":"ContainerDied","Data":"65dcab63a02eab33acb3ffc6ddeee94d06c581eb8c657bac5d69bf3fb2f314ef"} Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.366194 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="65dcab63a02eab33acb3ffc6ddeee94d06c581eb8c657bac5d69bf3fb2f314ef" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.365923 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.481390 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd"] Dec 05 17:57:27 crc kubenswrapper[4753]: E1205 17:57:27.482620 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.482661 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.483538 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.484599 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.488270 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.516958 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.516955 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.517069 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.517105 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.517071 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.517241 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.535181 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd"] Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.604006 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.604054 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.604246 4753 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.604379 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.604594 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.604677 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.604714 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-628nb\" (UniqueName: \"kubernetes.io/projected/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-kube-api-access-628nb\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.604826 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.604886 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.706994 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-628nb\" (UniqueName: \"kubernetes.io/projected/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-kube-api-access-628nb\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 
17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.707075 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.707103 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.707178 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.707202 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.707235 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.707265 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.707328 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.707359 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.708859 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.711043 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.711191 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.711852 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.712640 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.715137 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.715933 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.716635 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.728111 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-628nb\" (UniqueName: 
\"kubernetes.io/projected/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-kube-api-access-628nb\") pod \"nova-edpm-deployment-openstack-edpm-ipam-tcnxd\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:27 crc kubenswrapper[4753]: I1205 17:57:27.815834 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 17:57:28 crc kubenswrapper[4753]: I1205 17:57:28.398208 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd"] Dec 05 17:57:28 crc kubenswrapper[4753]: I1205 17:57:28.406265 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:57:29 crc kubenswrapper[4753]: I1205 17:57:29.389100 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" event={"ID":"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1","Type":"ContainerStarted","Data":"47d28c26cc75266b19dfe80e4d8aaa4c109ce5d16526562b4faa5e0d944ac2ff"} Dec 05 17:57:29 crc kubenswrapper[4753]: I1205 17:57:29.389652 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" event={"ID":"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1","Type":"ContainerStarted","Data":"0e77734c0e9756904fd574c496debf6155c880ae8a867e60e7fe0bf0c9fa4db0"} Dec 05 17:57:29 crc kubenswrapper[4753]: I1205 17:57:29.411225 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" podStartSLOduration=1.922600715 podStartE2EDuration="2.411205513s" podCreationTimestamp="2025-12-05 17:57:27 +0000 UTC" firstStartedPulling="2025-12-05 17:57:28.406046193 +0000 UTC m=+3186.909153199" lastFinishedPulling="2025-12-05 17:57:28.894650961 +0000 UTC m=+3187.397757997" observedRunningTime="2025-12-05 17:57:29.409184776 +0000 UTC m=+3187.912291792" watchObservedRunningTime="2025-12-05 17:57:29.411205513 +0000 UTC m=+3187.914312529" Dec 05 17:57:36 crc kubenswrapper[4753]: I1205 17:57:36.720863 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:57:36 crc kubenswrapper[4753]: E1205 17:57:36.721637 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:57:48 crc kubenswrapper[4753]: I1205 17:57:48.721302 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:57:48 crc kubenswrapper[4753]: E1205 17:57:48.722238 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:58:03 crc kubenswrapper[4753]: I1205 17:58:03.720529 4753 scope.go:117] 
"RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:58:03 crc kubenswrapper[4753]: E1205 17:58:03.722797 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:58:15 crc kubenswrapper[4753]: I1205 17:58:15.720539 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:58:15 crc kubenswrapper[4753]: E1205 17:58:15.721477 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:58:26 crc kubenswrapper[4753]: I1205 17:58:26.721545 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:58:26 crc kubenswrapper[4753]: E1205 17:58:26.722609 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:58:41 crc kubenswrapper[4753]: I1205 17:58:41.732883 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:58:41 crc kubenswrapper[4753]: E1205 17:58:41.734556 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:58:55 crc kubenswrapper[4753]: I1205 17:58:55.721192 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:58:55 crc kubenswrapper[4753]: E1205 17:58:55.721992 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:59:07 crc kubenswrapper[4753]: I1205 17:59:07.720935 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:59:07 crc kubenswrapper[4753]: E1205 17:59:07.721711 4753 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:59:20 crc kubenswrapper[4753]: I1205 17:59:20.721230 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:59:20 crc kubenswrapper[4753]: E1205 17:59:20.722134 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:59:33 crc kubenswrapper[4753]: I1205 17:59:33.721538 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:59:33 crc kubenswrapper[4753]: E1205 17:59:33.722562 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:59:44 crc kubenswrapper[4753]: I1205 17:59:44.721275 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:59:44 crc kubenswrapper[4753]: E1205 17:59:44.722016 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 17:59:57 crc kubenswrapper[4753]: I1205 17:59:57.721383 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 17:59:57 crc kubenswrapper[4753]: E1205 17:59:57.722253 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.150413 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6"] Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.152164 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.154852 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.154981 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.162088 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6"] Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.260571 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b665421-2d5a-4c71-a3a8-885bd609505c-config-volume\") pod \"collect-profiles-29415960-kjwl6\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.263441 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcdcc\" (UniqueName: \"kubernetes.io/projected/7b665421-2d5a-4c71-a3a8-885bd609505c-kube-api-access-xcdcc\") pod \"collect-profiles-29415960-kjwl6\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.263529 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b665421-2d5a-4c71-a3a8-885bd609505c-secret-volume\") pod \"collect-profiles-29415960-kjwl6\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.366017 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b665421-2d5a-4c71-a3a8-885bd609505c-config-volume\") pod \"collect-profiles-29415960-kjwl6\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.366112 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcdcc\" (UniqueName: \"kubernetes.io/projected/7b665421-2d5a-4c71-a3a8-885bd609505c-kube-api-access-xcdcc\") pod \"collect-profiles-29415960-kjwl6\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.366185 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b665421-2d5a-4c71-a3a8-885bd609505c-secret-volume\") pod \"collect-profiles-29415960-kjwl6\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.368451 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b665421-2d5a-4c71-a3a8-885bd609505c-config-volume\") pod 
\"collect-profiles-29415960-kjwl6\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.372491 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b665421-2d5a-4c71-a3a8-885bd609505c-secret-volume\") pod \"collect-profiles-29415960-kjwl6\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.399392 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcdcc\" (UniqueName: \"kubernetes.io/projected/7b665421-2d5a-4c71-a3a8-885bd609505c-kube-api-access-xcdcc\") pod \"collect-profiles-29415960-kjwl6\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:00 crc kubenswrapper[4753]: I1205 18:00:00.476663 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:01 crc kubenswrapper[4753]: I1205 18:00:01.015726 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6"] Dec 05 18:00:01 crc kubenswrapper[4753]: I1205 18:00:01.266353 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" event={"ID":"7b665421-2d5a-4c71-a3a8-885bd609505c","Type":"ContainerStarted","Data":"aed661e3e7dc2775e1caee7c22c4281b961ef04049e208426a23386ca97c05aa"} Dec 05 18:00:01 crc kubenswrapper[4753]: I1205 18:00:01.266686 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" event={"ID":"7b665421-2d5a-4c71-a3a8-885bd609505c","Type":"ContainerStarted","Data":"a0b191cc68f7e5be54e95a05e0cea20c3f51a953b64d38c24261e3d9cf9bde4b"} Dec 05 18:00:01 crc kubenswrapper[4753]: I1205 18:00:01.285996 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" podStartSLOduration=1.285981904 podStartE2EDuration="1.285981904s" podCreationTimestamp="2025-12-05 18:00:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 18:00:01.280311993 +0000 UTC m=+3339.783418999" watchObservedRunningTime="2025-12-05 18:00:01.285981904 +0000 UTC m=+3339.789088910" Dec 05 18:00:02 crc kubenswrapper[4753]: I1205 18:00:02.277010 4753 generic.go:334] "Generic (PLEG): container finished" podID="7b665421-2d5a-4c71-a3a8-885bd609505c" containerID="aed661e3e7dc2775e1caee7c22c4281b961ef04049e208426a23386ca97c05aa" exitCode=0 Dec 05 18:00:02 crc kubenswrapper[4753]: I1205 18:00:02.277207 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" event={"ID":"7b665421-2d5a-4c71-a3a8-885bd609505c","Type":"ContainerDied","Data":"aed661e3e7dc2775e1caee7c22c4281b961ef04049e208426a23386ca97c05aa"} Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.751223 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.845930 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b665421-2d5a-4c71-a3a8-885bd609505c-secret-volume\") pod \"7b665421-2d5a-4c71-a3a8-885bd609505c\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.846062 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b665421-2d5a-4c71-a3a8-885bd609505c-config-volume\") pod \"7b665421-2d5a-4c71-a3a8-885bd609505c\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.846130 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcdcc\" (UniqueName: \"kubernetes.io/projected/7b665421-2d5a-4c71-a3a8-885bd609505c-kube-api-access-xcdcc\") pod \"7b665421-2d5a-4c71-a3a8-885bd609505c\" (UID: \"7b665421-2d5a-4c71-a3a8-885bd609505c\") " Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.846813 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7b665421-2d5a-4c71-a3a8-885bd609505c-config-volume" (OuterVolumeSpecName: "config-volume") pod "7b665421-2d5a-4c71-a3a8-885bd609505c" (UID: "7b665421-2d5a-4c71-a3a8-885bd609505c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.847054 4753 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7b665421-2d5a-4c71-a3a8-885bd609505c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.877313 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b665421-2d5a-4c71-a3a8-885bd609505c-kube-api-access-xcdcc" (OuterVolumeSpecName: "kube-api-access-xcdcc") pod "7b665421-2d5a-4c71-a3a8-885bd609505c" (UID: "7b665421-2d5a-4c71-a3a8-885bd609505c"). InnerVolumeSpecName "kube-api-access-xcdcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.878331 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7b665421-2d5a-4c71-a3a8-885bd609505c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7b665421-2d5a-4c71-a3a8-885bd609505c" (UID: "7b665421-2d5a-4c71-a3a8-885bd609505c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.949547 4753 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7b665421-2d5a-4c71-a3a8-885bd609505c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:03 crc kubenswrapper[4753]: I1205 18:00:03.949579 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcdcc\" (UniqueName: \"kubernetes.io/projected/7b665421-2d5a-4c71-a3a8-885bd609505c-kube-api-access-xcdcc\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:04 crc kubenswrapper[4753]: I1205 18:00:04.295745 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" event={"ID":"7b665421-2d5a-4c71-a3a8-885bd609505c","Type":"ContainerDied","Data":"a0b191cc68f7e5be54e95a05e0cea20c3f51a953b64d38c24261e3d9cf9bde4b"} Dec 05 18:00:04 crc kubenswrapper[4753]: I1205 18:00:04.295981 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a0b191cc68f7e5be54e95a05e0cea20c3f51a953b64d38c24261e3d9cf9bde4b" Dec 05 18:00:04 crc kubenswrapper[4753]: I1205 18:00:04.295781 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-kjwl6" Dec 05 18:00:04 crc kubenswrapper[4753]: I1205 18:00:04.363788 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn"] Dec 05 18:00:04 crc kubenswrapper[4753]: I1205 18:00:04.375386 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415915-698dn"] Dec 05 18:00:05 crc kubenswrapper[4753]: I1205 18:00:05.733507 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24" path="/var/lib/kubelet/pods/8ed2f6f9-8648-4fdc-9d37-9ed8ed42ba24/volumes" Dec 05 18:00:11 crc kubenswrapper[4753]: I1205 18:00:11.729641 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 18:00:11 crc kubenswrapper[4753]: E1205 18:00:11.730352 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:00:21 crc kubenswrapper[4753]: I1205 18:00:21.466114 4753 generic.go:334] "Generic (PLEG): container finished" podID="352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" containerID="47d28c26cc75266b19dfe80e4d8aaa4c109ce5d16526562b4faa5e0d944ac2ff" exitCode=0 Dec 05 18:00:21 crc kubenswrapper[4753]: I1205 18:00:21.466199 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" event={"ID":"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1","Type":"ContainerDied","Data":"47d28c26cc75266b19dfe80e4d8aaa4c109ce5d16526562b4faa5e0d944ac2ff"} Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.034106 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.177836 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-extra-config-0\") pod \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.177932 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-inventory\") pod \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.177995 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-0\") pod \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.178014 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-ssh-key\") pod \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.178057 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-1\") pod \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.178077 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-1\") pod \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.178095 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-0\") pod \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.178203 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-628nb\" (UniqueName: \"kubernetes.io/projected/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-kube-api-access-628nb\") pod \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.178235 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-combined-ca-bundle\") pod \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\" (UID: \"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1\") " Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.183935 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-kube-api-access-628nb" (OuterVolumeSpecName: "kube-api-access-628nb") pod "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" (UID: "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1"). InnerVolumeSpecName "kube-api-access-628nb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.187432 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" (UID: "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.214161 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-inventory" (OuterVolumeSpecName: "inventory") pod "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" (UID: "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.215834 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" (UID: "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.218873 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" (UID: "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.220276 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" (UID: "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.221423 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" (UID: "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.222756 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" (UID: "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1"). InnerVolumeSpecName "nova-cell1-compute-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.224279 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" (UID: "352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.280765 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.280802 4753 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.280817 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.280833 4753 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.280846 4753 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.280857 4753 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.280868 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-628nb\" (UniqueName: \"kubernetes.io/projected/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-kube-api-access-628nb\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.280878 4753 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.280890 4753 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.485729 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" event={"ID":"352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1","Type":"ContainerDied","Data":"0e77734c0e9756904fd574c496debf6155c880ae8a867e60e7fe0bf0c9fa4db0"} Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.485778 4753 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="0e77734c0e9756904fd574c496debf6155c880ae8a867e60e7fe0bf0c9fa4db0" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.485822 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-tcnxd" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.586747 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl"] Dec 05 18:00:23 crc kubenswrapper[4753]: E1205 18:00:23.587574 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.587680 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 18:00:23 crc kubenswrapper[4753]: E1205 18:00:23.587776 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b665421-2d5a-4c71-a3a8-885bd609505c" containerName="collect-profiles" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.587856 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b665421-2d5a-4c71-a3a8-885bd609505c" containerName="collect-profiles" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.588212 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b665421-2d5a-4c71-a3a8-885bd609505c" containerName="collect-profiles" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.588384 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.589436 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.593174 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-2445p" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.593435 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.593273 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.593267 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.594073 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.613682 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl"] Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.690660 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.690782 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.690836 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.690909 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.690993 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 
18:00:23.691112 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.691168 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfgw8\" (UniqueName: \"kubernetes.io/projected/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-kube-api-access-jfgw8\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.792778 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.793075 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfgw8\" (UniqueName: \"kubernetes.io/projected/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-kube-api-access-jfgw8\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.793160 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.793204 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.793234 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.793259 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.793313 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.797083 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.797084 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.797133 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.798255 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.799786 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.808935 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.810961 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfgw8\" (UniqueName: \"kubernetes.io/projected/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-kube-api-access-jfgw8\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") 
" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:23 crc kubenswrapper[4753]: I1205 18:00:23.912096 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:00:24 crc kubenswrapper[4753]: I1205 18:00:24.479518 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl"] Dec 05 18:00:24 crc kubenswrapper[4753]: I1205 18:00:24.721035 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 18:00:24 crc kubenswrapper[4753]: E1205 18:00:24.721469 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:00:25 crc kubenswrapper[4753]: I1205 18:00:25.516121 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" event={"ID":"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f","Type":"ContainerStarted","Data":"c43d778fc0bcc6eb7121c7b8ed045e66a22d00cc4a6a208df4d5d567cfbcecf0"} Dec 05 18:00:25 crc kubenswrapper[4753]: I1205 18:00:25.516524 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" event={"ID":"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f","Type":"ContainerStarted","Data":"9ace527330edd3bec26ce517707000791e5c224597c752d19c0934d5f7a816bb"} Dec 05 18:00:25 crc kubenswrapper[4753]: I1205 18:00:25.549963 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" podStartSLOduration=2.056048907 podStartE2EDuration="2.549943705s" podCreationTimestamp="2025-12-05 18:00:23 +0000 UTC" firstStartedPulling="2025-12-05 18:00:24.496366302 +0000 UTC m=+3362.999473328" lastFinishedPulling="2025-12-05 18:00:24.99026108 +0000 UTC m=+3363.493368126" observedRunningTime="2025-12-05 18:00:25.534520238 +0000 UTC m=+3364.037627284" watchObservedRunningTime="2025-12-05 18:00:25.549943705 +0000 UTC m=+3364.053050711" Dec 05 18:00:31 crc kubenswrapper[4753]: I1205 18:00:31.858612 4753 scope.go:117] "RemoveContainer" containerID="3d51108761e9c69403371d5b0a4bd55ebe069ce1a1d6399b50b88c15896ab41f" Dec 05 18:00:35 crc kubenswrapper[4753]: I1205 18:00:35.720820 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 18:00:35 crc kubenswrapper[4753]: E1205 18:00:35.721688 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.425433 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-457gg"] Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.429786 4753 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.442222 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-457gg"] Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.579062 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-catalog-content\") pod \"redhat-marketplace-457gg\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.579421 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb9pk\" (UniqueName: \"kubernetes.io/projected/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-kube-api-access-hb9pk\") pod \"redhat-marketplace-457gg\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.579463 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-utilities\") pod \"redhat-marketplace-457gg\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.682291 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-catalog-content\") pod \"redhat-marketplace-457gg\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.682786 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-catalog-content\") pod \"redhat-marketplace-457gg\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.682829 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb9pk\" (UniqueName: \"kubernetes.io/projected/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-kube-api-access-hb9pk\") pod \"redhat-marketplace-457gg\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.682924 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-utilities\") pod \"redhat-marketplace-457gg\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.683237 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-utilities\") pod \"redhat-marketplace-457gg\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.713931 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb9pk\" (UniqueName: \"kubernetes.io/projected/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-kube-api-access-hb9pk\") pod \"redhat-marketplace-457gg\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:40 crc kubenswrapper[4753]: I1205 18:00:40.764259 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:41 crc kubenswrapper[4753]: W1205 18:00:41.288845 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66e797fd_98f1_4c62_9f39_d3f1671d7e4e.slice/crio-77b8ae25bd88b85386c90324c34563130bf7e5ddf188b616d5ef8829027347f1 WatchSource:0}: Error finding container 77b8ae25bd88b85386c90324c34563130bf7e5ddf188b616d5ef8829027347f1: Status 404 returned error can't find the container with id 77b8ae25bd88b85386c90324c34563130bf7e5ddf188b616d5ef8829027347f1 Dec 05 18:00:41 crc kubenswrapper[4753]: I1205 18:00:41.291737 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-457gg"] Dec 05 18:00:41 crc kubenswrapper[4753]: I1205 18:00:41.675830 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-457gg" event={"ID":"66e797fd-98f1-4c62-9f39-d3f1671d7e4e","Type":"ContainerStarted","Data":"77b8ae25bd88b85386c90324c34563130bf7e5ddf188b616d5ef8829027347f1"} Dec 05 18:00:42 crc kubenswrapper[4753]: I1205 18:00:42.689342 4753 generic.go:334] "Generic (PLEG): container finished" podID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerID="d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a" exitCode=0 Dec 05 18:00:42 crc kubenswrapper[4753]: I1205 18:00:42.689452 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-457gg" event={"ID":"66e797fd-98f1-4c62-9f39-d3f1671d7e4e","Type":"ContainerDied","Data":"d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a"} Dec 05 18:00:43 crc kubenswrapper[4753]: I1205 18:00:43.700115 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-457gg" event={"ID":"66e797fd-98f1-4c62-9f39-d3f1671d7e4e","Type":"ContainerStarted","Data":"7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71"} Dec 05 18:00:44 crc kubenswrapper[4753]: I1205 18:00:44.712250 4753 generic.go:334] "Generic (PLEG): container finished" podID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerID="7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71" exitCode=0 Dec 05 18:00:44 crc kubenswrapper[4753]: I1205 18:00:44.712322 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-457gg" event={"ID":"66e797fd-98f1-4c62-9f39-d3f1671d7e4e","Type":"ContainerDied","Data":"7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71"} Dec 05 18:00:47 crc kubenswrapper[4753]: I1205 18:00:47.721221 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 18:00:47 crc kubenswrapper[4753]: E1205 18:00:47.721987 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:00:47 crc kubenswrapper[4753]: I1205 18:00:47.744574 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-457gg" event={"ID":"66e797fd-98f1-4c62-9f39-d3f1671d7e4e","Type":"ContainerStarted","Data":"53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8"} Dec 05 18:00:47 crc kubenswrapper[4753]: I1205 18:00:47.768515 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-457gg" podStartSLOduration=5.168200797 podStartE2EDuration="7.768493287s" podCreationTimestamp="2025-12-05 18:00:40 +0000 UTC" firstStartedPulling="2025-12-05 18:00:42.691496203 +0000 UTC m=+3381.194603209" lastFinishedPulling="2025-12-05 18:00:45.291788683 +0000 UTC m=+3383.794895699" observedRunningTime="2025-12-05 18:00:47.759829991 +0000 UTC m=+3386.262937007" watchObservedRunningTime="2025-12-05 18:00:47.768493287 +0000 UTC m=+3386.271600303" Dec 05 18:00:50 crc kubenswrapper[4753]: I1205 18:00:50.764379 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:50 crc kubenswrapper[4753]: I1205 18:00:50.765456 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:50 crc kubenswrapper[4753]: I1205 18:00:50.807486 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:00:58 crc kubenswrapper[4753]: I1205 18:00:58.720250 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 18:00:58 crc kubenswrapper[4753]: E1205 18:00:58.720977 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.157085 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415961-8rgrd"] Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.158910 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.172951 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415961-8rgrd"] Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.253875 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-config-data\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.253964 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-fernet-keys\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.254041 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmmc2\" (UniqueName: \"kubernetes.io/projected/487332b8-9414-4d94-b52e-9deb57aaf729-kube-api-access-lmmc2\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.254077 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-combined-ca-bundle\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.356369 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmmc2\" (UniqueName: \"kubernetes.io/projected/487332b8-9414-4d94-b52e-9deb57aaf729-kube-api-access-lmmc2\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.356742 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-combined-ca-bundle\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.357651 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-config-data\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.357740 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-fernet-keys\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.364107 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-fernet-keys\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.368648 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-config-data\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.373125 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmmc2\" (UniqueName: \"kubernetes.io/projected/487332b8-9414-4d94-b52e-9deb57aaf729-kube-api-access-lmmc2\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.380894 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-combined-ca-bundle\") pod \"keystone-cron-29415961-8rgrd\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.484599 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.846436 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.894203 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-457gg"] Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.894464 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-457gg" podUID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerName="registry-server" containerID="cri-o://53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8" gracePeriod=2 Dec 05 18:01:00 crc kubenswrapper[4753]: I1205 18:01:00.972434 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415961-8rgrd"] Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.302065 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.382998 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-catalog-content\") pod \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.383062 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-utilities\") pod \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.383140 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb9pk\" (UniqueName: \"kubernetes.io/projected/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-kube-api-access-hb9pk\") pod \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\" (UID: \"66e797fd-98f1-4c62-9f39-d3f1671d7e4e\") " Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.384176 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-utilities" (OuterVolumeSpecName: "utilities") pod "66e797fd-98f1-4c62-9f39-d3f1671d7e4e" (UID: "66e797fd-98f1-4c62-9f39-d3f1671d7e4e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.387568 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-kube-api-access-hb9pk" (OuterVolumeSpecName: "kube-api-access-hb9pk") pod "66e797fd-98f1-4c62-9f39-d3f1671d7e4e" (UID: "66e797fd-98f1-4c62-9f39-d3f1671d7e4e"). InnerVolumeSpecName "kube-api-access-hb9pk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.409548 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "66e797fd-98f1-4c62-9f39-d3f1671d7e4e" (UID: "66e797fd-98f1-4c62-9f39-d3f1671d7e4e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.485333 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.485359 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.485369 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb9pk\" (UniqueName: \"kubernetes.io/projected/66e797fd-98f1-4c62-9f39-d3f1671d7e4e-kube-api-access-hb9pk\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.887786 4753 generic.go:334] "Generic (PLEG): container finished" podID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerID="53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8" exitCode=0 Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.887843 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-457gg" event={"ID":"66e797fd-98f1-4c62-9f39-d3f1671d7e4e","Type":"ContainerDied","Data":"53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8"} Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.887901 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-457gg" event={"ID":"66e797fd-98f1-4c62-9f39-d3f1671d7e4e","Type":"ContainerDied","Data":"77b8ae25bd88b85386c90324c34563130bf7e5ddf188b616d5ef8829027347f1"} Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.887927 4753 scope.go:117] "RemoveContainer" containerID="53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.888585 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-457gg" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.889825 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415961-8rgrd" event={"ID":"487332b8-9414-4d94-b52e-9deb57aaf729","Type":"ContainerStarted","Data":"8c74337e81649a4299679faf46abd9a4af22c3932bdc18b64ee2cca960949d28"} Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.889865 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415961-8rgrd" event={"ID":"487332b8-9414-4d94-b52e-9deb57aaf729","Type":"ContainerStarted","Data":"a71d5da1adcd128dcda59a8b5fdcf1798ba99443ba64467340e19ff870d2ed24"} Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.921503 4753 scope.go:117] "RemoveContainer" containerID="7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.943025 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415961-8rgrd" podStartSLOduration=1.9430015840000001 podStartE2EDuration="1.943001584s" podCreationTimestamp="2025-12-05 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 18:01:01.924258632 +0000 UTC m=+3400.427365658" watchObservedRunningTime="2025-12-05 18:01:01.943001584 +0000 UTC m=+3400.446108590" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.966616 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-457gg"] Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.976325 4753 scope.go:117] "RemoveContainer" containerID="d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a" Dec 05 18:01:01 crc kubenswrapper[4753]: I1205 18:01:01.978804 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-457gg"] Dec 05 18:01:02 crc kubenswrapper[4753]: I1205 18:01:02.026179 4753 scope.go:117] "RemoveContainer" containerID="53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8" Dec 05 18:01:02 crc kubenswrapper[4753]: E1205 18:01:02.027302 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8\": container with ID starting with 53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8 not found: ID does not exist" containerID="53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8" Dec 05 18:01:02 crc kubenswrapper[4753]: I1205 18:01:02.027349 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8"} err="failed to get container status \"53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8\": rpc error: code = NotFound desc = could not find container \"53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8\": container with ID starting with 53f5130568fead0ff3f1510132a6239584a349cb79c819caff83c6e143ece7c8 not found: ID does not exist" Dec 05 18:01:02 crc kubenswrapper[4753]: I1205 18:01:02.027376 4753 scope.go:117] "RemoveContainer" containerID="7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71" Dec 05 18:01:02 crc kubenswrapper[4753]: E1205 18:01:02.029017 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71\": container with ID starting with 7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71 not found: ID does not exist" containerID="7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71" Dec 05 18:01:02 crc kubenswrapper[4753]: I1205 18:01:02.029065 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71"} err="failed to get container status \"7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71\": rpc error: code = NotFound desc = could not find container \"7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71\": container with ID starting with 7ce39d766829592358f3e1b10ddbeef9445e561a74b5d70f9bbfc131d4c78a71 not found: ID does not exist" Dec 05 18:01:02 crc kubenswrapper[4753]: I1205 18:01:02.029099 4753 scope.go:117] "RemoveContainer" containerID="d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a" Dec 05 18:01:02 crc kubenswrapper[4753]: E1205 18:01:02.029412 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a\": container with ID starting with d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a not found: ID does not exist" containerID="d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a" Dec 05 18:01:02 crc kubenswrapper[4753]: I1205 18:01:02.029437 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a"} err="failed to get container status \"d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a\": rpc error: code = NotFound desc = could not find container \"d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a\": container with ID starting with d06e225b9e82368a07492af0c9fc120dcb2cbea86ce94f3a6cfea26b4e9f362a not found: ID does not exist" Dec 05 18:01:03 crc kubenswrapper[4753]: I1205 18:01:03.730700 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" path="/var/lib/kubelet/pods/66e797fd-98f1-4c62-9f39-d3f1671d7e4e/volumes" Dec 05 18:01:03 crc kubenswrapper[4753]: I1205 18:01:03.911279 4753 generic.go:334] "Generic (PLEG): container finished" podID="487332b8-9414-4d94-b52e-9deb57aaf729" containerID="8c74337e81649a4299679faf46abd9a4af22c3932bdc18b64ee2cca960949d28" exitCode=0 Dec 05 18:01:03 crc kubenswrapper[4753]: I1205 18:01:03.911326 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415961-8rgrd" event={"ID":"487332b8-9414-4d94-b52e-9deb57aaf729","Type":"ContainerDied","Data":"8c74337e81649a4299679faf46abd9a4af22c3932bdc18b64ee2cca960949d28"} Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.409235 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.469286 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-combined-ca-bundle\") pod \"487332b8-9414-4d94-b52e-9deb57aaf729\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.469405 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmmc2\" (UniqueName: \"kubernetes.io/projected/487332b8-9414-4d94-b52e-9deb57aaf729-kube-api-access-lmmc2\") pod \"487332b8-9414-4d94-b52e-9deb57aaf729\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.469580 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-config-data\") pod \"487332b8-9414-4d94-b52e-9deb57aaf729\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.469594 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-fernet-keys\") pod \"487332b8-9414-4d94-b52e-9deb57aaf729\" (UID: \"487332b8-9414-4d94-b52e-9deb57aaf729\") " Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.476397 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "487332b8-9414-4d94-b52e-9deb57aaf729" (UID: "487332b8-9414-4d94-b52e-9deb57aaf729"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.481505 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/487332b8-9414-4d94-b52e-9deb57aaf729-kube-api-access-lmmc2" (OuterVolumeSpecName: "kube-api-access-lmmc2") pod "487332b8-9414-4d94-b52e-9deb57aaf729" (UID: "487332b8-9414-4d94-b52e-9deb57aaf729"). InnerVolumeSpecName "kube-api-access-lmmc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.528505 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "487332b8-9414-4d94-b52e-9deb57aaf729" (UID: "487332b8-9414-4d94-b52e-9deb57aaf729"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.572272 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmmc2\" (UniqueName: \"kubernetes.io/projected/487332b8-9414-4d94-b52e-9deb57aaf729-kube-api-access-lmmc2\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.572307 4753 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.572318 4753 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.577105 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-config-data" (OuterVolumeSpecName: "config-data") pod "487332b8-9414-4d94-b52e-9deb57aaf729" (UID: "487332b8-9414-4d94-b52e-9deb57aaf729"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.673906 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/487332b8-9414-4d94-b52e-9deb57aaf729-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.935511 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415961-8rgrd" event={"ID":"487332b8-9414-4d94-b52e-9deb57aaf729","Type":"ContainerDied","Data":"a71d5da1adcd128dcda59a8b5fdcf1798ba99443ba64467340e19ff870d2ed24"} Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.935580 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415961-8rgrd" Dec 05 18:01:05 crc kubenswrapper[4753]: I1205 18:01:05.935595 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a71d5da1adcd128dcda59a8b5fdcf1798ba99443ba64467340e19ff870d2ed24" Dec 05 18:01:12 crc kubenswrapper[4753]: I1205 18:01:12.720522 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 18:01:12 crc kubenswrapper[4753]: E1205 18:01:12.721218 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.652667 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fntrs"] Dec 05 18:01:20 crc kubenswrapper[4753]: E1205 18:01:20.653837 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="487332b8-9414-4d94-b52e-9deb57aaf729" containerName="keystone-cron" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.653853 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="487332b8-9414-4d94-b52e-9deb57aaf729" containerName="keystone-cron" Dec 05 18:01:20 crc kubenswrapper[4753]: E1205 18:01:20.653864 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerName="extract-content" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.653872 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerName="extract-content" Dec 05 18:01:20 crc kubenswrapper[4753]: E1205 18:01:20.653903 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerName="extract-utilities" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.653911 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerName="extract-utilities" Dec 05 18:01:20 crc kubenswrapper[4753]: E1205 18:01:20.653922 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerName="registry-server" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.653928 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerName="registry-server" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.654374 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="66e797fd-98f1-4c62-9f39-d3f1671d7e4e" containerName="registry-server" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.654399 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="487332b8-9414-4d94-b52e-9deb57aaf729" containerName="keystone-cron" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.656239 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.663564 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fntrs"] Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.686830 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-catalog-content\") pod \"redhat-operators-fntrs\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.686903 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzfkg\" (UniqueName: \"kubernetes.io/projected/fd5ecfa8-047f-42cd-abcc-e71deea89135-kube-api-access-vzfkg\") pod \"redhat-operators-fntrs\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.687237 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-utilities\") pod \"redhat-operators-fntrs\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.789314 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-utilities\") pod \"redhat-operators-fntrs\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.790165 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-catalog-content\") pod \"redhat-operators-fntrs\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.790365 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzfkg\" (UniqueName: \"kubernetes.io/projected/fd5ecfa8-047f-42cd-abcc-e71deea89135-kube-api-access-vzfkg\") pod \"redhat-operators-fntrs\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.790838 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-catalog-content\") pod \"redhat-operators-fntrs\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.792137 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-utilities\") pod \"redhat-operators-fntrs\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.816243 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-vzfkg\" (UniqueName: \"kubernetes.io/projected/fd5ecfa8-047f-42cd-abcc-e71deea89135-kube-api-access-vzfkg\") pod \"redhat-operators-fntrs\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:20 crc kubenswrapper[4753]: I1205 18:01:20.991537 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:23 crc kubenswrapper[4753]: I1205 18:01:21.489179 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fntrs"] Dec 05 18:01:23 crc kubenswrapper[4753]: I1205 18:01:22.137837 4753 generic.go:334] "Generic (PLEG): container finished" podID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerID="d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72" exitCode=0 Dec 05 18:01:23 crc kubenswrapper[4753]: I1205 18:01:22.137959 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fntrs" event={"ID":"fd5ecfa8-047f-42cd-abcc-e71deea89135","Type":"ContainerDied","Data":"d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72"} Dec 05 18:01:23 crc kubenswrapper[4753]: I1205 18:01:22.138218 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fntrs" event={"ID":"fd5ecfa8-047f-42cd-abcc-e71deea89135","Type":"ContainerStarted","Data":"b2b3e14955c70f02999f6c3e16d8e10b44bbf0203975cd4887b7824fc277622b"} Dec 05 18:01:24 crc kubenswrapper[4753]: I1205 18:01:24.161231 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fntrs" event={"ID":"fd5ecfa8-047f-42cd-abcc-e71deea89135","Type":"ContainerStarted","Data":"e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483"} Dec 05 18:01:26 crc kubenswrapper[4753]: I1205 18:01:26.720962 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 18:01:26 crc kubenswrapper[4753]: E1205 18:01:26.721621 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:01:27 crc kubenswrapper[4753]: I1205 18:01:27.193936 4753 generic.go:334] "Generic (PLEG): container finished" podID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerID="e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483" exitCode=0 Dec 05 18:01:27 crc kubenswrapper[4753]: I1205 18:01:27.193987 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fntrs" event={"ID":"fd5ecfa8-047f-42cd-abcc-e71deea89135","Type":"ContainerDied","Data":"e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483"} Dec 05 18:01:28 crc kubenswrapper[4753]: I1205 18:01:28.206781 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fntrs" event={"ID":"fd5ecfa8-047f-42cd-abcc-e71deea89135","Type":"ContainerStarted","Data":"3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6"} Dec 05 18:01:28 crc kubenswrapper[4753]: I1205 18:01:28.234169 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-operators-fntrs" podStartSLOduration=2.595508249 podStartE2EDuration="8.234131573s" podCreationTimestamp="2025-12-05 18:01:20 +0000 UTC" firstStartedPulling="2025-12-05 18:01:22.142023338 +0000 UTC m=+3420.645130344" lastFinishedPulling="2025-12-05 18:01:27.780646662 +0000 UTC m=+3426.283753668" observedRunningTime="2025-12-05 18:01:28.230990083 +0000 UTC m=+3426.734097089" watchObservedRunningTime="2025-12-05 18:01:28.234131573 +0000 UTC m=+3426.737238579" Dec 05 18:01:29 crc kubenswrapper[4753]: E1205 18:01:29.999445 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd5ecfa8_047f_42cd_abcc_e71deea89135.slice/crio-d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72.scope\": RecentStats: unable to find data in memory cache]" Dec 05 18:01:30 crc kubenswrapper[4753]: I1205 18:01:30.991925 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:30 crc kubenswrapper[4753]: I1205 18:01:30.992205 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.173751 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-qrlsq"] Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.178580 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.186397 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qrlsq"] Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.333518 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-catalog-content\") pod \"certified-operators-qrlsq\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.333959 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9kbg\" (UniqueName: \"kubernetes.io/projected/262afcb5-c6c5-4180-abd8-bbda6afdde9c-kube-api-access-v9kbg\") pod \"certified-operators-qrlsq\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.334116 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-utilities\") pod \"certified-operators-qrlsq\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.379016 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kn9b6"] Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.381294 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.395696 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kn9b6"] Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.436671 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9jtp\" (UniqueName: \"kubernetes.io/projected/3c8f969a-4bda-4370-af8d-0bb990892d5b-kube-api-access-x9jtp\") pod \"community-operators-kn9b6\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.436941 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-catalog-content\") pod \"community-operators-kn9b6\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.437119 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-catalog-content\") pod \"certified-operators-qrlsq\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.437261 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-utilities\") pod \"community-operators-kn9b6\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.437409 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9kbg\" (UniqueName: \"kubernetes.io/projected/262afcb5-c6c5-4180-abd8-bbda6afdde9c-kube-api-access-v9kbg\") pod \"certified-operators-qrlsq\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.437581 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-utilities\") pod \"certified-operators-qrlsq\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.437705 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-catalog-content\") pod \"certified-operators-qrlsq\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.437952 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-utilities\") pod \"certified-operators-qrlsq\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.467738 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-v9kbg\" (UniqueName: \"kubernetes.io/projected/262afcb5-c6c5-4180-abd8-bbda6afdde9c-kube-api-access-v9kbg\") pod \"certified-operators-qrlsq\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.499562 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.538943 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9jtp\" (UniqueName: \"kubernetes.io/projected/3c8f969a-4bda-4370-af8d-0bb990892d5b-kube-api-access-x9jtp\") pod \"community-operators-kn9b6\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.539020 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-catalog-content\") pod \"community-operators-kn9b6\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.539109 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-utilities\") pod \"community-operators-kn9b6\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.539632 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-utilities\") pod \"community-operators-kn9b6\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.540225 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-catalog-content\") pod \"community-operators-kn9b6\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.558715 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9jtp\" (UniqueName: \"kubernetes.io/projected/3c8f969a-4bda-4370-af8d-0bb990892d5b-kube-api-access-x9jtp\") pod \"community-operators-kn9b6\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.731659 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:31 crc kubenswrapper[4753]: I1205 18:01:31.977913 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-qrlsq"] Dec 05 18:01:32 crc kubenswrapper[4753]: I1205 18:01:32.049812 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fntrs" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerName="registry-server" probeResult="failure" output=< Dec 05 18:01:32 crc kubenswrapper[4753]: timeout: failed to connect service ":50051" within 1s Dec 05 18:01:32 crc kubenswrapper[4753]: > Dec 05 18:01:32 crc kubenswrapper[4753]: I1205 18:01:32.253420 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrlsq" event={"ID":"262afcb5-c6c5-4180-abd8-bbda6afdde9c","Type":"ContainerStarted","Data":"ab58aefc6596057fc3c92562cdf7176d9be9dc268e0a3aae4ebef95c2f579f0b"} Dec 05 18:01:32 crc kubenswrapper[4753]: I1205 18:01:32.368656 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kn9b6"] Dec 05 18:01:33 crc kubenswrapper[4753]: I1205 18:01:33.263669 4753 generic.go:334] "Generic (PLEG): container finished" podID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerID="4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e" exitCode=0 Dec 05 18:01:33 crc kubenswrapper[4753]: I1205 18:01:33.263937 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9b6" event={"ID":"3c8f969a-4bda-4370-af8d-0bb990892d5b","Type":"ContainerDied","Data":"4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e"} Dec 05 18:01:33 crc kubenswrapper[4753]: I1205 18:01:33.263965 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9b6" event={"ID":"3c8f969a-4bda-4370-af8d-0bb990892d5b","Type":"ContainerStarted","Data":"90307df0e6bf11b61b86517c6459c31879ef4cc3a1c9c158aced97cc923c54bb"} Dec 05 18:01:33 crc kubenswrapper[4753]: I1205 18:01:33.267586 4753 generic.go:334] "Generic (PLEG): container finished" podID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerID="3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef" exitCode=0 Dec 05 18:01:33 crc kubenswrapper[4753]: I1205 18:01:33.267625 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrlsq" event={"ID":"262afcb5-c6c5-4180-abd8-bbda6afdde9c","Type":"ContainerDied","Data":"3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef"} Dec 05 18:01:34 crc kubenswrapper[4753]: I1205 18:01:34.279230 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9b6" event={"ID":"3c8f969a-4bda-4370-af8d-0bb990892d5b","Type":"ContainerStarted","Data":"bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1"} Dec 05 18:01:34 crc kubenswrapper[4753]: I1205 18:01:34.281738 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrlsq" event={"ID":"262afcb5-c6c5-4180-abd8-bbda6afdde9c","Type":"ContainerStarted","Data":"175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13"} Dec 05 18:01:36 crc kubenswrapper[4753]: I1205 18:01:36.305809 4753 generic.go:334] "Generic (PLEG): container finished" podID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerID="bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1" exitCode=0 Dec 05 
18:01:36 crc kubenswrapper[4753]: I1205 18:01:36.306000 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9b6" event={"ID":"3c8f969a-4bda-4370-af8d-0bb990892d5b","Type":"ContainerDied","Data":"bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1"} Dec 05 18:01:37 crc kubenswrapper[4753]: I1205 18:01:37.319073 4753 generic.go:334] "Generic (PLEG): container finished" podID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerID="175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13" exitCode=0 Dec 05 18:01:37 crc kubenswrapper[4753]: I1205 18:01:37.319191 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrlsq" event={"ID":"262afcb5-c6c5-4180-abd8-bbda6afdde9c","Type":"ContainerDied","Data":"175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13"} Dec 05 18:01:37 crc kubenswrapper[4753]: I1205 18:01:37.326195 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9b6" event={"ID":"3c8f969a-4bda-4370-af8d-0bb990892d5b","Type":"ContainerStarted","Data":"0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e"} Dec 05 18:01:37 crc kubenswrapper[4753]: I1205 18:01:37.388513 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kn9b6" podStartSLOduration=2.595292322 podStartE2EDuration="6.38848798s" podCreationTimestamp="2025-12-05 18:01:31 +0000 UTC" firstStartedPulling="2025-12-05 18:01:33.266004596 +0000 UTC m=+3431.769111592" lastFinishedPulling="2025-12-05 18:01:37.059200234 +0000 UTC m=+3435.562307250" observedRunningTime="2025-12-05 18:01:37.375912093 +0000 UTC m=+3435.879019119" watchObservedRunningTime="2025-12-05 18:01:37.38848798 +0000 UTC m=+3435.891594996" Dec 05 18:01:38 crc kubenswrapper[4753]: I1205 18:01:38.339387 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrlsq" event={"ID":"262afcb5-c6c5-4180-abd8-bbda6afdde9c","Type":"ContainerStarted","Data":"4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660"} Dec 05 18:01:38 crc kubenswrapper[4753]: I1205 18:01:38.369364 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-qrlsq" podStartSLOduration=2.914649165 podStartE2EDuration="7.369339877s" podCreationTimestamp="2025-12-05 18:01:31 +0000 UTC" firstStartedPulling="2025-12-05 18:01:33.269036752 +0000 UTC m=+3431.772143758" lastFinishedPulling="2025-12-05 18:01:37.723727464 +0000 UTC m=+3436.226834470" observedRunningTime="2025-12-05 18:01:38.356529544 +0000 UTC m=+3436.859636550" watchObservedRunningTime="2025-12-05 18:01:38.369339877 +0000 UTC m=+3436.872446883" Dec 05 18:01:38 crc kubenswrapper[4753]: I1205 18:01:38.721375 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 18:01:39 crc kubenswrapper[4753]: I1205 18:01:39.364528 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"252cc3357a5fa9d05ffa340d7bbb2b4848173e79f5fc3938e530e663ff04ce0f"} Dec 05 18:01:40 crc kubenswrapper[4753]: E1205 18:01:40.304347 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd5ecfa8_047f_42cd_abcc_e71deea89135.slice/crio-d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72.scope\": RecentStats: unable to find data in memory cache]" Dec 05 18:01:41 crc kubenswrapper[4753]: I1205 18:01:41.048831 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:41 crc kubenswrapper[4753]: I1205 18:01:41.106854 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:41 crc kubenswrapper[4753]: I1205 18:01:41.500077 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:41 crc kubenswrapper[4753]: I1205 18:01:41.500132 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:41 crc kubenswrapper[4753]: I1205 18:01:41.572894 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:41 crc kubenswrapper[4753]: I1205 18:01:41.743423 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:41 crc kubenswrapper[4753]: I1205 18:01:41.743481 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:41 crc kubenswrapper[4753]: I1205 18:01:41.800011 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.157542 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fntrs"] Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.392456 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fntrs" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerName="registry-server" containerID="cri-o://3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6" gracePeriod=2 Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.446698 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.468879 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.919775 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.928563 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-utilities\") pod \"fd5ecfa8-047f-42cd-abcc-e71deea89135\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.928818 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-catalog-content\") pod \"fd5ecfa8-047f-42cd-abcc-e71deea89135\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.928869 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vzfkg\" (UniqueName: \"kubernetes.io/projected/fd5ecfa8-047f-42cd-abcc-e71deea89135-kube-api-access-vzfkg\") pod \"fd5ecfa8-047f-42cd-abcc-e71deea89135\" (UID: \"fd5ecfa8-047f-42cd-abcc-e71deea89135\") " Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.929403 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-utilities" (OuterVolumeSpecName: "utilities") pod "fd5ecfa8-047f-42cd-abcc-e71deea89135" (UID: "fd5ecfa8-047f-42cd-abcc-e71deea89135"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.930565 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:42 crc kubenswrapper[4753]: I1205 18:01:42.935683 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd5ecfa8-047f-42cd-abcc-e71deea89135-kube-api-access-vzfkg" (OuterVolumeSpecName: "kube-api-access-vzfkg") pod "fd5ecfa8-047f-42cd-abcc-e71deea89135" (UID: "fd5ecfa8-047f-42cd-abcc-e71deea89135"). InnerVolumeSpecName "kube-api-access-vzfkg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.031885 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vzfkg\" (UniqueName: \"kubernetes.io/projected/fd5ecfa8-047f-42cd-abcc-e71deea89135-kube-api-access-vzfkg\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.056406 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fd5ecfa8-047f-42cd-abcc-e71deea89135" (UID: "fd5ecfa8-047f-42cd-abcc-e71deea89135"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.133119 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fd5ecfa8-047f-42cd-abcc-e71deea89135-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.404469 4753 generic.go:334] "Generic (PLEG): container finished" podID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerID="3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6" exitCode=0 Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.405306 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fntrs" event={"ID":"fd5ecfa8-047f-42cd-abcc-e71deea89135","Type":"ContainerDied","Data":"3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6"} Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.405512 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fntrs" event={"ID":"fd5ecfa8-047f-42cd-abcc-e71deea89135","Type":"ContainerDied","Data":"b2b3e14955c70f02999f6c3e16d8e10b44bbf0203975cd4887b7824fc277622b"} Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.405604 4753 scope.go:117] "RemoveContainer" containerID="3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.405384 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fntrs" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.428992 4753 scope.go:117] "RemoveContainer" containerID="e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.445868 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fntrs"] Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.455469 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fntrs"] Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.469359 4753 scope.go:117] "RemoveContainer" containerID="d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.526786 4753 scope.go:117] "RemoveContainer" containerID="3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6" Dec 05 18:01:43 crc kubenswrapper[4753]: E1205 18:01:43.527720 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6\": container with ID starting with 3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6 not found: ID does not exist" containerID="3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.527770 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6"} err="failed to get container status \"3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6\": rpc error: code = NotFound desc = could not find container \"3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6\": container with ID starting with 3349ad7160cd4852f49bd51705c42ba15f15a8e82c58e1dbd0de4ec5c597e5e6 not found: ID does not exist" Dec 05 18:01:43 crc 
kubenswrapper[4753]: I1205 18:01:43.527805 4753 scope.go:117] "RemoveContainer" containerID="e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483" Dec 05 18:01:43 crc kubenswrapper[4753]: E1205 18:01:43.528534 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483\": container with ID starting with e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483 not found: ID does not exist" containerID="e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.528564 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483"} err="failed to get container status \"e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483\": rpc error: code = NotFound desc = could not find container \"e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483\": container with ID starting with e5e74d665c779751b352566ab63717d741f68413ace14f2e139c51b71489c483 not found: ID does not exist" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.528577 4753 scope.go:117] "RemoveContainer" containerID="d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72" Dec 05 18:01:43 crc kubenswrapper[4753]: E1205 18:01:43.529876 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72\": container with ID starting with d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72 not found: ID does not exist" containerID="d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.529907 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72"} err="failed to get container status \"d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72\": rpc error: code = NotFound desc = could not find container \"d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72\": container with ID starting with d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72 not found: ID does not exist" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.733819 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" path="/var/lib/kubelet/pods/fd5ecfa8-047f-42cd-abcc-e71deea89135/volumes" Dec 05 18:01:43 crc kubenswrapper[4753]: I1205 18:01:43.966516 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qrlsq"] Dec 05 18:01:44 crc kubenswrapper[4753]: I1205 18:01:44.415817 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-qrlsq" podUID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerName="registry-server" containerID="cri-o://4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660" gracePeriod=2 Dec 05 18:01:44 crc kubenswrapper[4753]: I1205 18:01:44.563673 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kn9b6"] Dec 05 18:01:44 crc kubenswrapper[4753]: I1205 18:01:44.563921 4753 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/community-operators-kn9b6" podUID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerName="registry-server" containerID="cri-o://0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e" gracePeriod=2 Dec 05 18:01:44 crc kubenswrapper[4753]: I1205 18:01:44.959036 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:44 crc kubenswrapper[4753]: I1205 18:01:44.978548 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-catalog-content\") pod \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " Dec 05 18:01:44 crc kubenswrapper[4753]: I1205 18:01:44.978909 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9kbg\" (UniqueName: \"kubernetes.io/projected/262afcb5-c6c5-4180-abd8-bbda6afdde9c-kube-api-access-v9kbg\") pod \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " Dec 05 18:01:44 crc kubenswrapper[4753]: I1205 18:01:44.978953 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-utilities\") pod \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\" (UID: \"262afcb5-c6c5-4180-abd8-bbda6afdde9c\") " Dec 05 18:01:44 crc kubenswrapper[4753]: I1205 18:01:44.979762 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-utilities" (OuterVolumeSpecName: "utilities") pod "262afcb5-c6c5-4180-abd8-bbda6afdde9c" (UID: "262afcb5-c6c5-4180-abd8-bbda6afdde9c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:01:44 crc kubenswrapper[4753]: I1205 18:01:44.985848 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/262afcb5-c6c5-4180-abd8-bbda6afdde9c-kube-api-access-v9kbg" (OuterVolumeSpecName: "kube-api-access-v9kbg") pod "262afcb5-c6c5-4180-abd8-bbda6afdde9c" (UID: "262afcb5-c6c5-4180-abd8-bbda6afdde9c"). InnerVolumeSpecName "kube-api-access-v9kbg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.037588 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "262afcb5-c6c5-4180-abd8-bbda6afdde9c" (UID: "262afcb5-c6c5-4180-abd8-bbda6afdde9c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.037910 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.080400 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-catalog-content\") pod \"3c8f969a-4bda-4370-af8d-0bb990892d5b\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.080515 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-utilities\") pod \"3c8f969a-4bda-4370-af8d-0bb990892d5b\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.080568 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9jtp\" (UniqueName: \"kubernetes.io/projected/3c8f969a-4bda-4370-af8d-0bb990892d5b-kube-api-access-x9jtp\") pod \"3c8f969a-4bda-4370-af8d-0bb990892d5b\" (UID: \"3c8f969a-4bda-4370-af8d-0bb990892d5b\") " Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.080952 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9kbg\" (UniqueName: \"kubernetes.io/projected/262afcb5-c6c5-4180-abd8-bbda6afdde9c-kube-api-access-v9kbg\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.080972 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.080982 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/262afcb5-c6c5-4180-abd8-bbda6afdde9c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.081383 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-utilities" (OuterVolumeSpecName: "utilities") pod "3c8f969a-4bda-4370-af8d-0bb990892d5b" (UID: "3c8f969a-4bda-4370-af8d-0bb990892d5b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.083536 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c8f969a-4bda-4370-af8d-0bb990892d5b-kube-api-access-x9jtp" (OuterVolumeSpecName: "kube-api-access-x9jtp") pod "3c8f969a-4bda-4370-af8d-0bb990892d5b" (UID: "3c8f969a-4bda-4370-af8d-0bb990892d5b"). InnerVolumeSpecName "kube-api-access-x9jtp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.142586 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3c8f969a-4bda-4370-af8d-0bb990892d5b" (UID: "3c8f969a-4bda-4370-af8d-0bb990892d5b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.184107 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.184183 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9jtp\" (UniqueName: \"kubernetes.io/projected/3c8f969a-4bda-4370-af8d-0bb990892d5b-kube-api-access-x9jtp\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.184197 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3c8f969a-4bda-4370-af8d-0bb990892d5b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.428706 4753 generic.go:334] "Generic (PLEG): container finished" podID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerID="4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660" exitCode=0 Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.428749 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrlsq" event={"ID":"262afcb5-c6c5-4180-abd8-bbda6afdde9c","Type":"ContainerDied","Data":"4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660"} Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.428785 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-qrlsq" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.428794 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-qrlsq" event={"ID":"262afcb5-c6c5-4180-abd8-bbda6afdde9c","Type":"ContainerDied","Data":"ab58aefc6596057fc3c92562cdf7176d9be9dc268e0a3aae4ebef95c2f579f0b"} Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.428822 4753 scope.go:117] "RemoveContainer" containerID="4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.432373 4753 generic.go:334] "Generic (PLEG): container finished" podID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerID="0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e" exitCode=0 Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.432410 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9b6" event={"ID":"3c8f969a-4bda-4370-af8d-0bb990892d5b","Type":"ContainerDied","Data":"0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e"} Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.432438 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kn9b6" event={"ID":"3c8f969a-4bda-4370-af8d-0bb990892d5b","Type":"ContainerDied","Data":"90307df0e6bf11b61b86517c6459c31879ef4cc3a1c9c158aced97cc923c54bb"} Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.432463 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kn9b6" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.467164 4753 scope.go:117] "RemoveContainer" containerID="175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.470579 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-qrlsq"] Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.483683 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-qrlsq"] Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.490760 4753 scope.go:117] "RemoveContainer" containerID="3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.492981 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kn9b6"] Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.503433 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kn9b6"] Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.511775 4753 scope.go:117] "RemoveContainer" containerID="4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660" Dec 05 18:01:45 crc kubenswrapper[4753]: E1205 18:01:45.512254 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660\": container with ID starting with 4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660 not found: ID does not exist" containerID="4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.512292 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660"} err="failed to get container status \"4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660\": rpc error: code = NotFound desc = could not find container \"4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660\": container with ID starting with 4843f30d84994bdc645fd34b9c97d02395f67ac54f556d34805632ad70f01660 not found: ID does not exist" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.512313 4753 scope.go:117] "RemoveContainer" containerID="175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13" Dec 05 18:01:45 crc kubenswrapper[4753]: E1205 18:01:45.512635 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13\": container with ID starting with 175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13 not found: ID does not exist" containerID="175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.512686 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13"} err="failed to get container status \"175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13\": rpc error: code = NotFound desc = could not find container \"175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13\": container with ID starting with 
175911f7a6fde3553fecfa98735c7e3901cc3393d4e07aa19cd47de7066c9d13 not found: ID does not exist" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.512723 4753 scope.go:117] "RemoveContainer" containerID="3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef" Dec 05 18:01:45 crc kubenswrapper[4753]: E1205 18:01:45.513036 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef\": container with ID starting with 3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef not found: ID does not exist" containerID="3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.513063 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef"} err="failed to get container status \"3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef\": rpc error: code = NotFound desc = could not find container \"3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef\": container with ID starting with 3b0e26ad381fbaf0d9426451ad4d611ccf30d630497e521d46f26f9e80a77bef not found: ID does not exist" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.513078 4753 scope.go:117] "RemoveContainer" containerID="0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.531813 4753 scope.go:117] "RemoveContainer" containerID="bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.610763 4753 scope.go:117] "RemoveContainer" containerID="4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.659177 4753 scope.go:117] "RemoveContainer" containerID="0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e" Dec 05 18:01:45 crc kubenswrapper[4753]: E1205 18:01:45.659497 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e\": container with ID starting with 0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e not found: ID does not exist" containerID="0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.659525 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e"} err="failed to get container status \"0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e\": rpc error: code = NotFound desc = could not find container \"0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e\": container with ID starting with 0b19390afc91e0ef3fe9577e605f3b39e71cebeec4e0bf47e24c1b0f5327ab2e not found: ID does not exist" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.659548 4753 scope.go:117] "RemoveContainer" containerID="bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1" Dec 05 18:01:45 crc kubenswrapper[4753]: E1205 18:01:45.659771 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1\": container 
with ID starting with bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1 not found: ID does not exist" containerID="bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.659799 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1"} err="failed to get container status \"bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1\": rpc error: code = NotFound desc = could not find container \"bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1\": container with ID starting with bb2dd40048f4a4be91dcf9a963116d2a5f3be39c80de5c1b26482f9fd6b455c1 not found: ID does not exist" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.659811 4753 scope.go:117] "RemoveContainer" containerID="4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e" Dec 05 18:01:45 crc kubenswrapper[4753]: E1205 18:01:45.660110 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e\": container with ID starting with 4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e not found: ID does not exist" containerID="4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.660130 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e"} err="failed to get container status \"4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e\": rpc error: code = NotFound desc = could not find container \"4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e\": container with ID starting with 4a987f80d899f0a80f28e1708735ab0bb88e173d85b81accc0586457532f969e not found: ID does not exist" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.741835 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" path="/var/lib/kubelet/pods/262afcb5-c6c5-4180-abd8-bbda6afdde9c/volumes" Dec 05 18:01:45 crc kubenswrapper[4753]: I1205 18:01:45.745030 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c8f969a-4bda-4370-af8d-0bb990892d5b" path="/var/lib/kubelet/pods/3c8f969a-4bda-4370-af8d-0bb990892d5b/volumes" Dec 05 18:01:50 crc kubenswrapper[4753]: E1205 18:01:50.592859 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd5ecfa8_047f_42cd_abcc_e71deea89135.slice/crio-d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72.scope\": RecentStats: unable to find data in memory cache]" Dec 05 18:02:00 crc kubenswrapper[4753]: E1205 18:02:00.902317 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd5ecfa8_047f_42cd_abcc_e71deea89135.slice/crio-d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72.scope\": RecentStats: unable to find data in memory cache]" Dec 05 18:02:11 crc kubenswrapper[4753]: E1205 18:02:11.214169 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd5ecfa8_047f_42cd_abcc_e71deea89135.slice/crio-d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72.scope\": RecentStats: unable to find data in memory cache]" Dec 05 18:02:21 crc kubenswrapper[4753]: E1205 18:02:21.500482 4753 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfd5ecfa8_047f_42cd_abcc_e71deea89135.slice/crio-d2ffa4a3aa0e39adf71db1853a5a1750a334a5c9c12f0a116946b2678e754d72.scope\": RecentStats: unable to find data in memory cache]" Dec 05 18:02:49 crc kubenswrapper[4753]: I1205 18:02:49.178274 4753 generic.go:334] "Generic (PLEG): container finished" podID="9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" containerID="c43d778fc0bcc6eb7121c7b8ed045e66a22d00cc4a6a208df4d5d567cfbcecf0" exitCode=0 Dec 05 18:02:49 crc kubenswrapper[4753]: I1205 18:02:49.178311 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" event={"ID":"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f","Type":"ContainerDied","Data":"c43d778fc0bcc6eb7121c7b8ed045e66a22d00cc4a6a208df4d5d567cfbcecf0"} Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.619975 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.794925 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfgw8\" (UniqueName: \"kubernetes.io/projected/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-kube-api-access-jfgw8\") pod \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.795194 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-1\") pod \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.795321 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ssh-key\") pod \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.795409 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-telemetry-combined-ca-bundle\") pod \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.795450 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-inventory\") pod \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.795477 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: 
\"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-2\") pod \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.795495 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-0\") pod \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\" (UID: \"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f\") " Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.802172 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" (UID: "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.802420 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-kube-api-access-jfgw8" (OuterVolumeSpecName: "kube-api-access-jfgw8") pod "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" (UID: "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f"). InnerVolumeSpecName "kube-api-access-jfgw8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.824839 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" (UID: "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.825394 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" (UID: "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.826017 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" (UID: "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.831747 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-inventory" (OuterVolumeSpecName: "inventory") pod "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" (UID: "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.839522 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" (UID: "9b316eb5-2fa8-4582-afdd-0b94dfe44a6f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.899339 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfgw8\" (UniqueName: \"kubernetes.io/projected/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-kube-api-access-jfgw8\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.899955 4753 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.899993 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.900007 4753 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.900020 4753 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.900033 4753 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:50 crc kubenswrapper[4753]: I1205 18:02:50.900064 4753 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/9b316eb5-2fa8-4582-afdd-0b94dfe44a6f-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:51 crc kubenswrapper[4753]: I1205 18:02:51.201092 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" event={"ID":"9b316eb5-2fa8-4582-afdd-0b94dfe44a6f","Type":"ContainerDied","Data":"9ace527330edd3bec26ce517707000791e5c224597c752d19c0934d5f7a816bb"} Dec 05 18:02:51 crc kubenswrapper[4753]: I1205 18:02:51.201472 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9ace527330edd3bec26ce517707000791e5c224597c752d19c0934d5f7a816bb" Dec 05 18:02:51 crc kubenswrapper[4753]: I1205 18:02:51.201219 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl" Dec 05 18:03:58 crc kubenswrapper[4753]: I1205 18:03:58.979078 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:03:58 crc kubenswrapper[4753]: I1205 18:03:58.979684 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.363114 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364449 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364469 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364488 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerName="registry-server" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364496 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerName="registry-server" Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364517 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerName="extract-content" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364524 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerName="extract-content" Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364540 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerName="extract-utilities" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364548 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerName="extract-utilities" Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364576 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerName="extract-utilities" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364583 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerName="extract-utilities" Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364591 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerName="extract-utilities" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364599 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerName="extract-utilities" Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364618 4753 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerName="extract-content" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364625 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerName="extract-content" Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364639 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerName="registry-server" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364646 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerName="registry-server" Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364656 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerName="extract-content" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364663 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerName="extract-content" Dec 05 18:04:06 crc kubenswrapper[4753]: E1205 18:04:06.364678 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerName="registry-server" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364685 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerName="registry-server" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364944 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd5ecfa8-047f-42cd-abcc-e71deea89135" containerName="registry-server" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364968 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c8f969a-4bda-4370-af8d-0bb990892d5b" containerName="registry-server" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364977 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b316eb5-2fa8-4582-afdd-0b94dfe44a6f" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.364985 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="262afcb5-c6c5-4180-abd8-bbda6afdde9c" containerName="registry-server" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.365904 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.372898 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.372920 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.373720 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.380802 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-sgm52" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.385334 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.491863 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.491929 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxjb6\" (UniqueName: \"kubernetes.io/projected/230fdd26-f37e-4a32-a261-efdb39dc8de2-kube-api-access-gxjb6\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.491964 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.491993 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.492169 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.492270 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.492528 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: 
\"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.492594 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.492638 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-config-data\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.594788 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.594856 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.594882 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-config-data\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.594961 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.594996 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxjb6\" (UniqueName: \"kubernetes.io/projected/230fdd26-f37e-4a32-a261-efdb39dc8de2-kube-api-access-gxjb6\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.595026 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.595057 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.595101 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.595520 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.595459 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.595571 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.595725 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.596267 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.596284 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-config-data\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.600816 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.602622 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " 
pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.611438 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.625612 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.625936 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxjb6\" (UniqueName: \"kubernetes.io/projected/230fdd26-f37e-4a32-a261-efdb39dc8de2-kube-api-access-gxjb6\") pod \"tempest-tests-tempest\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " pod="openstack/tempest-tests-tempest" Dec 05 18:04:06 crc kubenswrapper[4753]: I1205 18:04:06.722245 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 18:04:07 crc kubenswrapper[4753]: I1205 18:04:07.204286 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 18:04:07 crc kubenswrapper[4753]: I1205 18:04:07.205656 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 18:04:08 crc kubenswrapper[4753]: I1205 18:04:08.041323 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"230fdd26-f37e-4a32-a261-efdb39dc8de2","Type":"ContainerStarted","Data":"b5f215e97ed468e7c779ebbc61a9fe184a24ac2c7cf85640df0bfac90a7502d6"} Dec 05 18:04:28 crc kubenswrapper[4753]: I1205 18:04:28.978982 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:04:28 crc kubenswrapper[4753]: I1205 18:04:28.979629 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:04:41 crc kubenswrapper[4753]: E1205 18:04:41.288960 4753 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 05 18:04:41 crc kubenswrapper[4753]: E1205 18:04:41.289892 4753 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gxjb6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(230fdd26-f37e-4a32-a261-efdb39dc8de2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 18:04:41 crc kubenswrapper[4753]: E1205 18:04:41.291120 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="230fdd26-f37e-4a32-a261-efdb39dc8de2" Dec 05 18:04:41 crc kubenswrapper[4753]: E1205 18:04:41.437682 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="230fdd26-f37e-4a32-a261-efdb39dc8de2" Dec 05 18:04:57 crc kubenswrapper[4753]: I1205 18:04:57.151835 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 18:04:58 crc kubenswrapper[4753]: I1205 18:04:58.693428 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"230fdd26-f37e-4a32-a261-efdb39dc8de2","Type":"ContainerStarted","Data":"5574312f1cf7fded571e94d6501d43f53395cf914fc6322bc597036b4bf1c0e1"} Dec 05 18:04:58 crc kubenswrapper[4753]: I1205 18:04:58.724585 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.780849864 podStartE2EDuration="53.724566982s" podCreationTimestamp="2025-12-05 18:04:05 +0000 UTC" firstStartedPulling="2025-12-05 18:04:07.205367653 +0000 UTC m=+3585.708474669" lastFinishedPulling="2025-12-05 18:04:57.149084781 +0000 UTC m=+3635.652191787" observedRunningTime="2025-12-05 18:04:58.711676326 +0000 UTC m=+3637.214783332" watchObservedRunningTime="2025-12-05 18:04:58.724566982 +0000 UTC m=+3637.227673988" Dec 05 18:04:58 crc kubenswrapper[4753]: I1205 18:04:58.979550 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:04:58 crc kubenswrapper[4753]: I1205 18:04:58.979626 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:04:58 crc kubenswrapper[4753]: I1205 18:04:58.979695 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 18:04:58 crc kubenswrapper[4753]: I1205 18:04:58.980386 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"252cc3357a5fa9d05ffa340d7bbb2b4848173e79f5fc3938e530e663ff04ce0f"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 18:04:58 crc kubenswrapper[4753]: I1205 18:04:58.980440 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://252cc3357a5fa9d05ffa340d7bbb2b4848173e79f5fc3938e530e663ff04ce0f" gracePeriod=600 Dec 05 18:04:59 crc kubenswrapper[4753]: I1205 18:04:59.708066 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" 
containerID="252cc3357a5fa9d05ffa340d7bbb2b4848173e79f5fc3938e530e663ff04ce0f" exitCode=0 Dec 05 18:04:59 crc kubenswrapper[4753]: I1205 18:04:59.708452 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"252cc3357a5fa9d05ffa340d7bbb2b4848173e79f5fc3938e530e663ff04ce0f"} Dec 05 18:04:59 crc kubenswrapper[4753]: I1205 18:04:59.708539 4753 scope.go:117] "RemoveContainer" containerID="ac6f1e1e98711fb4ac9c08b197f72af512a76d659ae2deed4807bbfa0559c18e" Dec 05 18:05:00 crc kubenswrapper[4753]: I1205 18:05:00.738089 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469"} Dec 05 18:07:28 crc kubenswrapper[4753]: I1205 18:07:28.979568 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:07:28 crc kubenswrapper[4753]: I1205 18:07:28.980248 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:07:58 crc kubenswrapper[4753]: I1205 18:07:58.979205 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:07:58 crc kubenswrapper[4753]: I1205 18:07:58.979792 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:08:28 crc kubenswrapper[4753]: I1205 18:08:28.978659 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:08:28 crc kubenswrapper[4753]: I1205 18:08:28.979083 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:08:28 crc kubenswrapper[4753]: I1205 18:08:28.979123 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 18:08:28 crc kubenswrapper[4753]: I1205 18:08:28.979926 4753 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 18:08:28 crc kubenswrapper[4753]: I1205 18:08:28.979979 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" gracePeriod=600 Dec 05 18:08:29 crc kubenswrapper[4753]: E1205 18:08:29.128471 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:08:29 crc kubenswrapper[4753]: I1205 18:08:29.921785 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" exitCode=0 Dec 05 18:08:29 crc kubenswrapper[4753]: I1205 18:08:29.921853 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469"} Dec 05 18:08:29 crc kubenswrapper[4753]: I1205 18:08:29.922105 4753 scope.go:117] "RemoveContainer" containerID="252cc3357a5fa9d05ffa340d7bbb2b4848173e79f5fc3938e530e663ff04ce0f" Dec 05 18:08:29 crc kubenswrapper[4753]: I1205 18:08:29.922827 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:08:29 crc kubenswrapper[4753]: E1205 18:08:29.923113 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:08:43 crc kubenswrapper[4753]: I1205 18:08:43.720904 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:08:43 crc kubenswrapper[4753]: E1205 18:08:43.721740 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:08:55 crc kubenswrapper[4753]: I1205 18:08:55.721334 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:08:55 crc kubenswrapper[4753]: E1205 18:08:55.723074 4753 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:09:06 crc kubenswrapper[4753]: I1205 18:09:06.720768 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:09:06 crc kubenswrapper[4753]: E1205 18:09:06.721519 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:09:17 crc kubenswrapper[4753]: I1205 18:09:17.720586 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:09:17 crc kubenswrapper[4753]: E1205 18:09:17.721283 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:09:30 crc kubenswrapper[4753]: I1205 18:09:30.721001 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:09:30 crc kubenswrapper[4753]: E1205 18:09:30.721758 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:09:44 crc kubenswrapper[4753]: I1205 18:09:44.721732 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:09:44 crc kubenswrapper[4753]: E1205 18:09:44.722878 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:09:57 crc kubenswrapper[4753]: I1205 18:09:57.720971 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:09:57 crc kubenswrapper[4753]: E1205 18:09:57.721701 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:10:12 crc kubenswrapper[4753]: I1205 18:10:12.721070 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:10:12 crc kubenswrapper[4753]: E1205 18:10:12.721849 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:10:26 crc kubenswrapper[4753]: I1205 18:10:26.720421 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:10:26 crc kubenswrapper[4753]: E1205 18:10:26.721353 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:10:30 crc kubenswrapper[4753]: I1205 18:10:30.579451 4753 generic.go:334] "Generic (PLEG): container finished" podID="230fdd26-f37e-4a32-a261-efdb39dc8de2" containerID="5574312f1cf7fded571e94d6501d43f53395cf914fc6322bc597036b4bf1c0e1" exitCode=0 Dec 05 18:10:30 crc kubenswrapper[4753]: I1205 18:10:30.579535 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"230fdd26-f37e-4a32-a261-efdb39dc8de2","Type":"ContainerDied","Data":"5574312f1cf7fded571e94d6501d43f53395cf914fc6322bc597036b4bf1c0e1"} Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.193931 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.315406 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ssh-key\") pod \"230fdd26-f37e-4a32-a261-efdb39dc8de2\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.315485 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gxjb6\" (UniqueName: \"kubernetes.io/projected/230fdd26-f37e-4a32-a261-efdb39dc8de2-kube-api-access-gxjb6\") pod \"230fdd26-f37e-4a32-a261-efdb39dc8de2\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.315520 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ca-certs\") pod \"230fdd26-f37e-4a32-a261-efdb39dc8de2\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.315573 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-config-data\") pod \"230fdd26-f37e-4a32-a261-efdb39dc8de2\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.315599 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"230fdd26-f37e-4a32-a261-efdb39dc8de2\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.315727 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config-secret\") pod \"230fdd26-f37e-4a32-a261-efdb39dc8de2\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.315781 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config\") pod \"230fdd26-f37e-4a32-a261-efdb39dc8de2\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.315817 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-temporary\") pod \"230fdd26-f37e-4a32-a261-efdb39dc8de2\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.315907 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-workdir\") pod \"230fdd26-f37e-4a32-a261-efdb39dc8de2\" (UID: \"230fdd26-f37e-4a32-a261-efdb39dc8de2\") " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.316861 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-config-data" (OuterVolumeSpecName: "config-data") pod 
"230fdd26-f37e-4a32-a261-efdb39dc8de2" (UID: "230fdd26-f37e-4a32-a261-efdb39dc8de2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.317129 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "230fdd26-f37e-4a32-a261-efdb39dc8de2" (UID: "230fdd26-f37e-4a32-a261-efdb39dc8de2"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.322454 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "test-operator-logs") pod "230fdd26-f37e-4a32-a261-efdb39dc8de2" (UID: "230fdd26-f37e-4a32-a261-efdb39dc8de2"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.325583 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/230fdd26-f37e-4a32-a261-efdb39dc8de2-kube-api-access-gxjb6" (OuterVolumeSpecName: "kube-api-access-gxjb6") pod "230fdd26-f37e-4a32-a261-efdb39dc8de2" (UID: "230fdd26-f37e-4a32-a261-efdb39dc8de2"). InnerVolumeSpecName "kube-api-access-gxjb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.350167 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "230fdd26-f37e-4a32-a261-efdb39dc8de2" (UID: "230fdd26-f37e-4a32-a261-efdb39dc8de2"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.361317 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "230fdd26-f37e-4a32-a261-efdb39dc8de2" (UID: "230fdd26-f37e-4a32-a261-efdb39dc8de2"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.366944 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "230fdd26-f37e-4a32-a261-efdb39dc8de2" (UID: "230fdd26-f37e-4a32-a261-efdb39dc8de2"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.384944 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "230fdd26-f37e-4a32-a261-efdb39dc8de2" (UID: "230fdd26-f37e-4a32-a261-efdb39dc8de2"). InnerVolumeSpecName "openstack-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.417963 4753 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.417988 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gxjb6\" (UniqueName: \"kubernetes.io/projected/230fdd26-f37e-4a32-a261-efdb39dc8de2-kube-api-access-gxjb6\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.417997 4753 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.418007 4753 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.418028 4753 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.418037 4753 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.418046 4753 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/230fdd26-f37e-4a32-a261-efdb39dc8de2-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.418055 4753 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.445321 4753 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.521315 4753 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.620408 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"230fdd26-f37e-4a32-a261-efdb39dc8de2","Type":"ContainerDied","Data":"b5f215e97ed468e7c779ebbc61a9fe184a24ac2c7cf85640df0bfac90a7502d6"} Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.620460 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5f215e97ed468e7c779ebbc61a9fe184a24ac2c7cf85640df0bfac90a7502d6" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.620518 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.740933 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "230fdd26-f37e-4a32-a261-efdb39dc8de2" (UID: "230fdd26-f37e-4a32-a261-efdb39dc8de2"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:10:32 crc kubenswrapper[4753]: I1205 18:10:32.828548 4753 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/230fdd26-f37e-4a32-a261-efdb39dc8de2-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.758346 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 18:10:34 crc kubenswrapper[4753]: E1205 18:10:34.759345 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="230fdd26-f37e-4a32-a261-efdb39dc8de2" containerName="tempest-tests-tempest-tests-runner" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.759366 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="230fdd26-f37e-4a32-a261-efdb39dc8de2" containerName="tempest-tests-tempest-tests-runner" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.759679 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="230fdd26-f37e-4a32-a261-efdb39dc8de2" containerName="tempest-tests-tempest-tests-runner" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.760841 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.763953 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-sgm52" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.787220 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.869144 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"2c8ba284-0ce6-4fd1-af48-9953b5b14d55\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.869240 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-57hpj\" (UniqueName: \"kubernetes.io/projected/2c8ba284-0ce6-4fd1-af48-9953b5b14d55-kube-api-access-57hpj\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"2c8ba284-0ce6-4fd1-af48-9953b5b14d55\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.971349 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"2c8ba284-0ce6-4fd1-af48-9953b5b14d55\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.971602 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-57hpj\" (UniqueName: \"kubernetes.io/projected/2c8ba284-0ce6-4fd1-af48-9953b5b14d55-kube-api-access-57hpj\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"2c8ba284-0ce6-4fd1-af48-9953b5b14d55\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 18:10:34 crc kubenswrapper[4753]: I1205 18:10:34.972243 4753 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"2c8ba284-0ce6-4fd1-af48-9953b5b14d55\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 18:10:35 crc kubenswrapper[4753]: I1205 18:10:35.006712 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-57hpj\" (UniqueName: \"kubernetes.io/projected/2c8ba284-0ce6-4fd1-af48-9953b5b14d55-kube-api-access-57hpj\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"2c8ba284-0ce6-4fd1-af48-9953b5b14d55\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 18:10:35 crc kubenswrapper[4753]: I1205 18:10:35.011178 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"2c8ba284-0ce6-4fd1-af48-9953b5b14d55\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 18:10:35 crc 
kubenswrapper[4753]: I1205 18:10:35.102363 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 18:10:35 crc kubenswrapper[4753]: I1205 18:10:35.700320 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 18:10:35 crc kubenswrapper[4753]: I1205 18:10:35.721078 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 18:10:36 crc kubenswrapper[4753]: I1205 18:10:36.666929 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"2c8ba284-0ce6-4fd1-af48-9953b5b14d55","Type":"ContainerStarted","Data":"47f00257ed956835daf14451b38bbb09f4cb32a4ff20781962c40ca25afb3ad6"} Dec 05 18:10:37 crc kubenswrapper[4753]: I1205 18:10:37.679378 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"2c8ba284-0ce6-4fd1-af48-9953b5b14d55","Type":"ContainerStarted","Data":"9984502ec07c79c8cee7dbb80f9318586314f5a3d24f638dc9f3d85bb7b1e6d7"} Dec 05 18:10:37 crc kubenswrapper[4753]: I1205 18:10:37.697276 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.6841019470000003 podStartE2EDuration="3.697256681s" podCreationTimestamp="2025-12-05 18:10:34 +0000 UTC" firstStartedPulling="2025-12-05 18:10:35.72088275 +0000 UTC m=+3974.223989756" lastFinishedPulling="2025-12-05 18:10:36.734037484 +0000 UTC m=+3975.237144490" observedRunningTime="2025-12-05 18:10:37.692745103 +0000 UTC m=+3976.195852109" watchObservedRunningTime="2025-12-05 18:10:37.697256681 +0000 UTC m=+3976.200363697" Dec 05 18:10:39 crc kubenswrapper[4753]: I1205 18:10:39.720664 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:10:39 crc kubenswrapper[4753]: E1205 18:10:39.722140 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:10:54 crc kubenswrapper[4753]: I1205 18:10:54.721446 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:10:54 crc kubenswrapper[4753]: E1205 18:10:54.722753 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.199812 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-whd89/must-gather-7jh4t"] Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.204384 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whd89/must-gather-7jh4t" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.211031 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-whd89"/"openshift-service-ca.crt" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.211270 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-whd89"/"kube-root-ca.crt" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.211178 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-whd89"/"default-dockercfg-9mdjp" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.248946 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-whd89/must-gather-7jh4t"] Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.250334 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f551b736-2665-40a5-a843-dc960cfe8f06-must-gather-output\") pod \"must-gather-7jh4t\" (UID: \"f551b736-2665-40a5-a843-dc960cfe8f06\") " pod="openshift-must-gather-whd89/must-gather-7jh4t" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.250441 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sv5hr\" (UniqueName: \"kubernetes.io/projected/f551b736-2665-40a5-a843-dc960cfe8f06-kube-api-access-sv5hr\") pod \"must-gather-7jh4t\" (UID: \"f551b736-2665-40a5-a843-dc960cfe8f06\") " pod="openshift-must-gather-whd89/must-gather-7jh4t" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.352555 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f551b736-2665-40a5-a843-dc960cfe8f06-must-gather-output\") pod \"must-gather-7jh4t\" (UID: \"f551b736-2665-40a5-a843-dc960cfe8f06\") " pod="openshift-must-gather-whd89/must-gather-7jh4t" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.352799 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sv5hr\" (UniqueName: \"kubernetes.io/projected/f551b736-2665-40a5-a843-dc960cfe8f06-kube-api-access-sv5hr\") pod \"must-gather-7jh4t\" (UID: \"f551b736-2665-40a5-a843-dc960cfe8f06\") " pod="openshift-must-gather-whd89/must-gather-7jh4t" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.353032 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f551b736-2665-40a5-a843-dc960cfe8f06-must-gather-output\") pod \"must-gather-7jh4t\" (UID: \"f551b736-2665-40a5-a843-dc960cfe8f06\") " pod="openshift-must-gather-whd89/must-gather-7jh4t" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.385827 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sv5hr\" (UniqueName: \"kubernetes.io/projected/f551b736-2665-40a5-a843-dc960cfe8f06-kube-api-access-sv5hr\") pod \"must-gather-7jh4t\" (UID: \"f551b736-2665-40a5-a843-dc960cfe8f06\") " pod="openshift-must-gather-whd89/must-gather-7jh4t" Dec 05 18:11:00 crc kubenswrapper[4753]: I1205 18:11:00.538650 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whd89/must-gather-7jh4t" Dec 05 18:11:01 crc kubenswrapper[4753]: I1205 18:11:01.068590 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-whd89/must-gather-7jh4t"] Dec 05 18:11:01 crc kubenswrapper[4753]: I1205 18:11:01.923716 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/must-gather-7jh4t" event={"ID":"f551b736-2665-40a5-a843-dc960cfe8f06","Type":"ContainerStarted","Data":"e98e81aa1f9073cc93ff9cc494af28f302afc1f96f1f010d4dcea20f10c9279b"} Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.626476 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rgkwc"] Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.629371 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.649890 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgkwc"] Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.740973 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-utilities\") pod \"redhat-marketplace-rgkwc\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.741134 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-catalog-content\") pod \"redhat-marketplace-rgkwc\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.741440 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7mlv\" (UniqueName: \"kubernetes.io/projected/3a9e2b4d-17c1-4c65-a9fa-95158a124167-kube-api-access-v7mlv\") pod \"redhat-marketplace-rgkwc\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.843642 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7mlv\" (UniqueName: \"kubernetes.io/projected/3a9e2b4d-17c1-4c65-a9fa-95158a124167-kube-api-access-v7mlv\") pod \"redhat-marketplace-rgkwc\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.843770 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-utilities\") pod \"redhat-marketplace-rgkwc\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.843818 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-catalog-content\") pod \"redhat-marketplace-rgkwc\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc 
kubenswrapper[4753]: I1205 18:11:03.844966 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-utilities\") pod \"redhat-marketplace-rgkwc\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.845264 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-catalog-content\") pod \"redhat-marketplace-rgkwc\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.867712 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7mlv\" (UniqueName: \"kubernetes.io/projected/3a9e2b4d-17c1-4c65-a9fa-95158a124167-kube-api-access-v7mlv\") pod \"redhat-marketplace-rgkwc\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:03 crc kubenswrapper[4753]: I1205 18:11:03.957194 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:06 crc kubenswrapper[4753]: I1205 18:11:06.453760 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgkwc"] Dec 05 18:11:06 crc kubenswrapper[4753]: I1205 18:11:06.983921 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/must-gather-7jh4t" event={"ID":"f551b736-2665-40a5-a843-dc960cfe8f06","Type":"ContainerStarted","Data":"44e054548b9b769d99c46b93aecd39a16da343ea6e381ba0e75a8e458b46fbb4"} Dec 05 18:11:06 crc kubenswrapper[4753]: I1205 18:11:06.984321 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/must-gather-7jh4t" event={"ID":"f551b736-2665-40a5-a843-dc960cfe8f06","Type":"ContainerStarted","Data":"c1d23686d38f373dac59c902de472beb8e500c9178572b2dab94c26f575c05af"} Dec 05 18:11:06 crc kubenswrapper[4753]: I1205 18:11:06.985992 4753 generic.go:334] "Generic (PLEG): container finished" podID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerID="cfcef0fb8ceb840d3511d426d79c0de9ebf50b12d38e1e3cd5b0d1e35e87706d" exitCode=0 Dec 05 18:11:06 crc kubenswrapper[4753]: I1205 18:11:06.986066 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgkwc" event={"ID":"3a9e2b4d-17c1-4c65-a9fa-95158a124167","Type":"ContainerDied","Data":"cfcef0fb8ceb840d3511d426d79c0de9ebf50b12d38e1e3cd5b0d1e35e87706d"} Dec 05 18:11:06 crc kubenswrapper[4753]: I1205 18:11:06.986106 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgkwc" event={"ID":"3a9e2b4d-17c1-4c65-a9fa-95158a124167","Type":"ContainerStarted","Data":"66341299dabbfb90a48c095971a821310a587d42fc26aa863519c65cc5c4f8e0"} Dec 05 18:11:07 crc kubenswrapper[4753]: I1205 18:11:07.021958 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-whd89/must-gather-7jh4t" podStartSLOduration=2.083798772 podStartE2EDuration="7.02194068s" podCreationTimestamp="2025-12-05 18:11:00 +0000 UTC" firstStartedPulling="2025-12-05 18:11:01.064519224 +0000 UTC m=+3999.567626240" lastFinishedPulling="2025-12-05 18:11:06.002661132 +0000 UTC m=+4004.505768148" observedRunningTime="2025-12-05 
18:11:07.012536853 +0000 UTC m=+4005.515643859" watchObservedRunningTime="2025-12-05 18:11:07.02194068 +0000 UTC m=+4005.525047686" Dec 05 18:11:07 crc kubenswrapper[4753]: I1205 18:11:07.721582 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:11:07 crc kubenswrapper[4753]: E1205 18:11:07.723143 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:11:08 crc kubenswrapper[4753]: I1205 18:11:08.001049 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgkwc" event={"ID":"3a9e2b4d-17c1-4c65-a9fa-95158a124167","Type":"ContainerStarted","Data":"fb70f2342ee9f471f342e4fd4515bf6060844495d6b72f68a081c6c3d424003a"} Dec 05 18:11:09 crc kubenswrapper[4753]: I1205 18:11:09.014105 4753 generic.go:334] "Generic (PLEG): container finished" podID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerID="fb70f2342ee9f471f342e4fd4515bf6060844495d6b72f68a081c6c3d424003a" exitCode=0 Dec 05 18:11:09 crc kubenswrapper[4753]: I1205 18:11:09.014198 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgkwc" event={"ID":"3a9e2b4d-17c1-4c65-a9fa-95158a124167","Type":"ContainerDied","Data":"fb70f2342ee9f471f342e4fd4515bf6060844495d6b72f68a081c6c3d424003a"} Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.028440 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgkwc" event={"ID":"3a9e2b4d-17c1-4c65-a9fa-95158a124167","Type":"ContainerStarted","Data":"a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3"} Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.058224 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rgkwc" podStartSLOduration=4.643970161 podStartE2EDuration="7.058205669s" podCreationTimestamp="2025-12-05 18:11:03 +0000 UTC" firstStartedPulling="2025-12-05 18:11:06.989089128 +0000 UTC m=+4005.492196134" lastFinishedPulling="2025-12-05 18:11:09.403324616 +0000 UTC m=+4007.906431642" observedRunningTime="2025-12-05 18:11:10.048411121 +0000 UTC m=+4008.551518137" watchObservedRunningTime="2025-12-05 18:11:10.058205669 +0000 UTC m=+4008.561312675" Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.521045 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-whd89/crc-debug-sn7z4"] Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.523189 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.609736 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fr8n\" (UniqueName: \"kubernetes.io/projected/24555084-11cf-4410-8155-1dd041b18ba4-kube-api-access-2fr8n\") pod \"crc-debug-sn7z4\" (UID: \"24555084-11cf-4410-8155-1dd041b18ba4\") " pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.609968 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24555084-11cf-4410-8155-1dd041b18ba4-host\") pod \"crc-debug-sn7z4\" (UID: \"24555084-11cf-4410-8155-1dd041b18ba4\") " pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.712137 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fr8n\" (UniqueName: \"kubernetes.io/projected/24555084-11cf-4410-8155-1dd041b18ba4-kube-api-access-2fr8n\") pod \"crc-debug-sn7z4\" (UID: \"24555084-11cf-4410-8155-1dd041b18ba4\") " pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.712387 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24555084-11cf-4410-8155-1dd041b18ba4-host\") pod \"crc-debug-sn7z4\" (UID: \"24555084-11cf-4410-8155-1dd041b18ba4\") " pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.712516 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24555084-11cf-4410-8155-1dd041b18ba4-host\") pod \"crc-debug-sn7z4\" (UID: \"24555084-11cf-4410-8155-1dd041b18ba4\") " pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.740951 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fr8n\" (UniqueName: \"kubernetes.io/projected/24555084-11cf-4410-8155-1dd041b18ba4-kube-api-access-2fr8n\") pod \"crc-debug-sn7z4\" (UID: \"24555084-11cf-4410-8155-1dd041b18ba4\") " pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:11:10 crc kubenswrapper[4753]: I1205 18:11:10.842990 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:11:10 crc kubenswrapper[4753]: W1205 18:11:10.887666 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24555084_11cf_4410_8155_1dd041b18ba4.slice/crio-04017c5e80f4028dc75a3e9449bbc2f8bbeb4e57d814a8a106875e2b26e1abd9 WatchSource:0}: Error finding container 04017c5e80f4028dc75a3e9449bbc2f8bbeb4e57d814a8a106875e2b26e1abd9: Status 404 returned error can't find the container with id 04017c5e80f4028dc75a3e9449bbc2f8bbeb4e57d814a8a106875e2b26e1abd9 Dec 05 18:11:11 crc kubenswrapper[4753]: I1205 18:11:11.037534 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/crc-debug-sn7z4" event={"ID":"24555084-11cf-4410-8155-1dd041b18ba4","Type":"ContainerStarted","Data":"04017c5e80f4028dc75a3e9449bbc2f8bbeb4e57d814a8a106875e2b26e1abd9"} Dec 05 18:11:12 crc kubenswrapper[4753]: E1205 18:11:12.411736 4753 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.233:46804->38.102.83.233:43175: read tcp 38.102.83.233:46804->38.102.83.233:43175: read: connection reset by peer Dec 05 18:11:13 crc kubenswrapper[4753]: I1205 18:11:13.958075 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:13 crc kubenswrapper[4753]: I1205 18:11:13.959181 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:14 crc kubenswrapper[4753]: I1205 18:11:14.030184 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:14 crc kubenswrapper[4753]: I1205 18:11:14.130713 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:14 crc kubenswrapper[4753]: I1205 18:11:14.274166 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgkwc"] Dec 05 18:11:16 crc kubenswrapper[4753]: I1205 18:11:16.101793 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rgkwc" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerName="registry-server" containerID="cri-o://a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3" gracePeriod=2 Dec 05 18:11:17 crc kubenswrapper[4753]: I1205 18:11:17.113808 4753 generic.go:334] "Generic (PLEG): container finished" podID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerID="a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3" exitCode=0 Dec 05 18:11:17 crc kubenswrapper[4753]: I1205 18:11:17.113850 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgkwc" event={"ID":"3a9e2b4d-17c1-4c65-a9fa-95158a124167","Type":"ContainerDied","Data":"a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3"} Dec 05 18:11:20 crc kubenswrapper[4753]: I1205 18:11:20.720648 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:11:20 crc kubenswrapper[4753]: E1205 18:11:20.721409 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:11:23 crc kubenswrapper[4753]: E1205 18:11:23.958278 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3 is running failed: container process not found" containerID="a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 18:11:23 crc kubenswrapper[4753]: E1205 18:11:23.970306 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3 is running failed: container process not found" containerID="a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 18:11:23 crc kubenswrapper[4753]: E1205 18:11:23.970848 4753 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3 is running failed: container process not found" containerID="a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 18:11:23 crc kubenswrapper[4753]: E1205 18:11:23.970893 4753 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-rgkwc" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerName="registry-server" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.087811 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.184418 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v7mlv\" (UniqueName: \"kubernetes.io/projected/3a9e2b4d-17c1-4c65-a9fa-95158a124167-kube-api-access-v7mlv\") pod \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.184798 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-utilities\") pod \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.184912 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-catalog-content\") pod \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\" (UID: \"3a9e2b4d-17c1-4c65-a9fa-95158a124167\") " Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.185706 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-utilities" (OuterVolumeSpecName: "utilities") pod "3a9e2b4d-17c1-4c65-a9fa-95158a124167" (UID: "3a9e2b4d-17c1-4c65-a9fa-95158a124167"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.194095 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a9e2b4d-17c1-4c65-a9fa-95158a124167-kube-api-access-v7mlv" (OuterVolumeSpecName: "kube-api-access-v7mlv") pod "3a9e2b4d-17c1-4c65-a9fa-95158a124167" (UID: "3a9e2b4d-17c1-4c65-a9fa-95158a124167"). InnerVolumeSpecName "kube-api-access-v7mlv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.195858 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rgkwc" event={"ID":"3a9e2b4d-17c1-4c65-a9fa-95158a124167","Type":"ContainerDied","Data":"66341299dabbfb90a48c095971a821310a587d42fc26aa863519c65cc5c4f8e0"} Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.195915 4753 scope.go:117] "RemoveContainer" containerID="a055ea6ab014b1a30722961cf96d57376996625c57101b5d2ade34e63ed2fee3" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.196055 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rgkwc" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.197981 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/crc-debug-sn7z4" event={"ID":"24555084-11cf-4410-8155-1dd041b18ba4","Type":"ContainerStarted","Data":"37ad614dcc6ddce3f70e1e9d25e43875b8e2fc7186c207ad1a5979de4b074c45"} Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.209648 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a9e2b4d-17c1-4c65-a9fa-95158a124167" (UID: "3a9e2b4d-17c1-4c65-a9fa-95158a124167"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.235699 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-whd89/crc-debug-sn7z4" podStartSLOduration=1.585308988 podStartE2EDuration="14.235678307s" podCreationTimestamp="2025-12-05 18:11:10 +0000 UTC" firstStartedPulling="2025-12-05 18:11:10.890007309 +0000 UTC m=+4009.393114315" lastFinishedPulling="2025-12-05 18:11:23.540376628 +0000 UTC m=+4022.043483634" observedRunningTime="2025-12-05 18:11:24.223902763 +0000 UTC m=+4022.727009789" watchObservedRunningTime="2025-12-05 18:11:24.235678307 +0000 UTC m=+4022.738785313" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.253046 4753 scope.go:117] "RemoveContainer" containerID="fb70f2342ee9f471f342e4fd4515bf6060844495d6b72f68a081c6c3d424003a" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.282034 4753 scope.go:117] "RemoveContainer" containerID="cfcef0fb8ceb840d3511d426d79c0de9ebf50b12d38e1e3cd5b0d1e35e87706d" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.287544 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v7mlv\" (UniqueName: \"kubernetes.io/projected/3a9e2b4d-17c1-4c65-a9fa-95158a124167-kube-api-access-v7mlv\") on node \"crc\" DevicePath \"\"" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.287582 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.287598 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a9e2b4d-17c1-4c65-a9fa-95158a124167-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.545033 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgkwc"] Dec 05 18:11:24 crc kubenswrapper[4753]: I1205 18:11:24.557076 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rgkwc"] Dec 05 18:11:25 crc kubenswrapper[4753]: I1205 18:11:25.731467 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" path="/var/lib/kubelet/pods/3a9e2b4d-17c1-4c65-a9fa-95158a124167/volumes" Dec 05 18:11:35 crc kubenswrapper[4753]: I1205 18:11:35.720742 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:11:35 crc kubenswrapper[4753]: E1205 18:11:35.721521 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:11:48 crc kubenswrapper[4753]: I1205 18:11:48.720855 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:11:48 crc kubenswrapper[4753]: E1205 18:11:48.721563 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:12:01 crc kubenswrapper[4753]: I1205 18:12:01.726733 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:12:01 crc kubenswrapper[4753]: E1205 18:12:01.727416 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:12:07 crc kubenswrapper[4753]: I1205 18:12:07.610996 4753 generic.go:334] "Generic (PLEG): container finished" podID="24555084-11cf-4410-8155-1dd041b18ba4" containerID="37ad614dcc6ddce3f70e1e9d25e43875b8e2fc7186c207ad1a5979de4b074c45" exitCode=0 Dec 05 18:12:07 crc kubenswrapper[4753]: I1205 18:12:07.611055 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/crc-debug-sn7z4" event={"ID":"24555084-11cf-4410-8155-1dd041b18ba4","Type":"ContainerDied","Data":"37ad614dcc6ddce3f70e1e9d25e43875b8e2fc7186c207ad1a5979de4b074c45"} Dec 05 18:12:08 crc kubenswrapper[4753]: I1205 18:12:08.799464 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:12:08 crc kubenswrapper[4753]: I1205 18:12:08.829289 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-whd89/crc-debug-sn7z4"] Dec 05 18:12:08 crc kubenswrapper[4753]: I1205 18:12:08.838418 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-whd89/crc-debug-sn7z4"] Dec 05 18:12:08 crc kubenswrapper[4753]: I1205 18:12:08.897895 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24555084-11cf-4410-8155-1dd041b18ba4-host\") pod \"24555084-11cf-4410-8155-1dd041b18ba4\" (UID: \"24555084-11cf-4410-8155-1dd041b18ba4\") " Dec 05 18:12:08 crc kubenswrapper[4753]: I1205 18:12:08.897954 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fr8n\" (UniqueName: \"kubernetes.io/projected/24555084-11cf-4410-8155-1dd041b18ba4-kube-api-access-2fr8n\") pod \"24555084-11cf-4410-8155-1dd041b18ba4\" (UID: \"24555084-11cf-4410-8155-1dd041b18ba4\") " Dec 05 18:12:08 crc kubenswrapper[4753]: I1205 18:12:08.898020 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/24555084-11cf-4410-8155-1dd041b18ba4-host" (OuterVolumeSpecName: "host") pod "24555084-11cf-4410-8155-1dd041b18ba4" (UID: "24555084-11cf-4410-8155-1dd041b18ba4"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 18:12:08 crc kubenswrapper[4753]: I1205 18:12:08.898509 4753 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/24555084-11cf-4410-8155-1dd041b18ba4-host\") on node \"crc\" DevicePath \"\"" Dec 05 18:12:08 crc kubenswrapper[4753]: I1205 18:12:08.904179 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/24555084-11cf-4410-8155-1dd041b18ba4-kube-api-access-2fr8n" (OuterVolumeSpecName: "kube-api-access-2fr8n") pod "24555084-11cf-4410-8155-1dd041b18ba4" (UID: "24555084-11cf-4410-8155-1dd041b18ba4"). InnerVolumeSpecName "kube-api-access-2fr8n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.000454 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fr8n\" (UniqueName: \"kubernetes.io/projected/24555084-11cf-4410-8155-1dd041b18ba4-kube-api-access-2fr8n\") on node \"crc\" DevicePath \"\"" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.647443 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04017c5e80f4028dc75a3e9449bbc2f8bbeb4e57d814a8a106875e2b26e1abd9" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.647523 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-sn7z4" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.740347 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="24555084-11cf-4410-8155-1dd041b18ba4" path="/var/lib/kubelet/pods/24555084-11cf-4410-8155-1dd041b18ba4/volumes" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.975681 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-whd89/crc-debug-cm7ct"] Dec 05 18:12:09 crc kubenswrapper[4753]: E1205 18:12:09.976058 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerName="extract-utilities" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.976069 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerName="extract-utilities" Dec 05 18:12:09 crc kubenswrapper[4753]: E1205 18:12:09.976086 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerName="extract-content" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.976094 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerName="extract-content" Dec 05 18:12:09 crc kubenswrapper[4753]: E1205 18:12:09.976125 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="24555084-11cf-4410-8155-1dd041b18ba4" containerName="container-00" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.976130 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="24555084-11cf-4410-8155-1dd041b18ba4" containerName="container-00" Dec 05 18:12:09 crc kubenswrapper[4753]: E1205 18:12:09.976172 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerName="registry-server" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.976178 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerName="registry-server" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.976360 4753 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="24555084-11cf-4410-8155-1dd041b18ba4" containerName="container-00" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.976381 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a9e2b4d-17c1-4c65-a9fa-95158a124167" containerName="registry-server" Dec 05 18:12:09 crc kubenswrapper[4753]: I1205 18:12:09.977113 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.022666 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/947722d1-a25a-4f77-9d4c-39610b983597-host\") pod \"crc-debug-cm7ct\" (UID: \"947722d1-a25a-4f77-9d4c-39610b983597\") " pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.023049 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rl2g\" (UniqueName: \"kubernetes.io/projected/947722d1-a25a-4f77-9d4c-39610b983597-kube-api-access-2rl2g\") pod \"crc-debug-cm7ct\" (UID: \"947722d1-a25a-4f77-9d4c-39610b983597\") " pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.125765 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/947722d1-a25a-4f77-9d4c-39610b983597-host\") pod \"crc-debug-cm7ct\" (UID: \"947722d1-a25a-4f77-9d4c-39610b983597\") " pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.125890 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rl2g\" (UniqueName: \"kubernetes.io/projected/947722d1-a25a-4f77-9d4c-39610b983597-kube-api-access-2rl2g\") pod \"crc-debug-cm7ct\" (UID: \"947722d1-a25a-4f77-9d4c-39610b983597\") " pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.125972 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/947722d1-a25a-4f77-9d4c-39610b983597-host\") pod \"crc-debug-cm7ct\" (UID: \"947722d1-a25a-4f77-9d4c-39610b983597\") " pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.148869 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rl2g\" (UniqueName: \"kubernetes.io/projected/947722d1-a25a-4f77-9d4c-39610b983597-kube-api-access-2rl2g\") pod \"crc-debug-cm7ct\" (UID: \"947722d1-a25a-4f77-9d4c-39610b983597\") " pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.303536 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.659708 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/crc-debug-cm7ct" event={"ID":"947722d1-a25a-4f77-9d4c-39610b983597","Type":"ContainerStarted","Data":"a66acf4eb77bd363c8986af12325e679bcd0b27524e2fe39eba9d460beefba4c"} Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.660045 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/crc-debug-cm7ct" event={"ID":"947722d1-a25a-4f77-9d4c-39610b983597","Type":"ContainerStarted","Data":"f75fc41d0080f0ff2912c78423fadc41e4c902eebe5dfa7a922a4c16cc75b41d"} Dec 05 18:12:10 crc kubenswrapper[4753]: I1205 18:12:10.681383 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-whd89/crc-debug-cm7ct" podStartSLOduration=1.681361834 podStartE2EDuration="1.681361834s" podCreationTimestamp="2025-12-05 18:12:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 18:12:10.675706493 +0000 UTC m=+4069.178813519" watchObservedRunningTime="2025-12-05 18:12:10.681361834 +0000 UTC m=+4069.184468840" Dec 05 18:12:11 crc kubenswrapper[4753]: I1205 18:12:11.670228 4753 generic.go:334] "Generic (PLEG): container finished" podID="947722d1-a25a-4f77-9d4c-39610b983597" containerID="a66acf4eb77bd363c8986af12325e679bcd0b27524e2fe39eba9d460beefba4c" exitCode=0 Dec 05 18:12:11 crc kubenswrapper[4753]: I1205 18:12:11.670295 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/crc-debug-cm7ct" event={"ID":"947722d1-a25a-4f77-9d4c-39610b983597","Type":"ContainerDied","Data":"a66acf4eb77bd363c8986af12325e679bcd0b27524e2fe39eba9d460beefba4c"} Dec 05 18:12:12 crc kubenswrapper[4753]: I1205 18:12:12.788006 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:12 crc kubenswrapper[4753]: I1205 18:12:12.826424 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-whd89/crc-debug-cm7ct"] Dec 05 18:12:12 crc kubenswrapper[4753]: I1205 18:12:12.852682 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-whd89/crc-debug-cm7ct"] Dec 05 18:12:12 crc kubenswrapper[4753]: I1205 18:12:12.888907 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rl2g\" (UniqueName: \"kubernetes.io/projected/947722d1-a25a-4f77-9d4c-39610b983597-kube-api-access-2rl2g\") pod \"947722d1-a25a-4f77-9d4c-39610b983597\" (UID: \"947722d1-a25a-4f77-9d4c-39610b983597\") " Dec 05 18:12:12 crc kubenswrapper[4753]: I1205 18:12:12.889024 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/947722d1-a25a-4f77-9d4c-39610b983597-host\") pod \"947722d1-a25a-4f77-9d4c-39610b983597\" (UID: \"947722d1-a25a-4f77-9d4c-39610b983597\") " Dec 05 18:12:12 crc kubenswrapper[4753]: I1205 18:12:12.889702 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/947722d1-a25a-4f77-9d4c-39610b983597-host" (OuterVolumeSpecName: "host") pod "947722d1-a25a-4f77-9d4c-39610b983597" (UID: "947722d1-a25a-4f77-9d4c-39610b983597"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 18:12:12 crc kubenswrapper[4753]: I1205 18:12:12.898501 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/947722d1-a25a-4f77-9d4c-39610b983597-kube-api-access-2rl2g" (OuterVolumeSpecName: "kube-api-access-2rl2g") pod "947722d1-a25a-4f77-9d4c-39610b983597" (UID: "947722d1-a25a-4f77-9d4c-39610b983597"). InnerVolumeSpecName "kube-api-access-2rl2g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:12:12 crc kubenswrapper[4753]: I1205 18:12:12.992072 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rl2g\" (UniqueName: \"kubernetes.io/projected/947722d1-a25a-4f77-9d4c-39610b983597-kube-api-access-2rl2g\") on node \"crc\" DevicePath \"\"" Dec 05 18:12:12 crc kubenswrapper[4753]: I1205 18:12:12.992111 4753 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/947722d1-a25a-4f77-9d4c-39610b983597-host\") on node \"crc\" DevicePath \"\"" Dec 05 18:12:13 crc kubenswrapper[4753]: I1205 18:12:13.691318 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f75fc41d0080f0ff2912c78423fadc41e4c902eebe5dfa7a922a4c16cc75b41d" Dec 05 18:12:13 crc kubenswrapper[4753]: I1205 18:12:13.691362 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-cm7ct" Dec 05 18:12:13 crc kubenswrapper[4753]: I1205 18:12:13.734977 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="947722d1-a25a-4f77-9d4c-39610b983597" path="/var/lib/kubelet/pods/947722d1-a25a-4f77-9d4c-39610b983597/volumes" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.068316 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-whd89/crc-debug-zshz2"] Dec 05 18:12:14 crc kubenswrapper[4753]: E1205 18:12:14.068766 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="947722d1-a25a-4f77-9d4c-39610b983597" containerName="container-00" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.068805 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="947722d1-a25a-4f77-9d4c-39610b983597" containerName="container-00" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.069006 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="947722d1-a25a-4f77-9d4c-39610b983597" containerName="container-00" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.069939 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.113348 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e2ad97-a729-4933-a82b-b7e4704b79e8-host\") pod \"crc-debug-zshz2\" (UID: \"50e2ad97-a729-4933-a82b-b7e4704b79e8\") " pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.113733 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtrrj\" (UniqueName: \"kubernetes.io/projected/50e2ad97-a729-4933-a82b-b7e4704b79e8-kube-api-access-gtrrj\") pod \"crc-debug-zshz2\" (UID: \"50e2ad97-a729-4933-a82b-b7e4704b79e8\") " pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.215738 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtrrj\" (UniqueName: \"kubernetes.io/projected/50e2ad97-a729-4933-a82b-b7e4704b79e8-kube-api-access-gtrrj\") pod \"crc-debug-zshz2\" (UID: \"50e2ad97-a729-4933-a82b-b7e4704b79e8\") " pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.215885 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e2ad97-a729-4933-a82b-b7e4704b79e8-host\") pod \"crc-debug-zshz2\" (UID: \"50e2ad97-a729-4933-a82b-b7e4704b79e8\") " pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.216052 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e2ad97-a729-4933-a82b-b7e4704b79e8-host\") pod \"crc-debug-zshz2\" (UID: \"50e2ad97-a729-4933-a82b-b7e4704b79e8\") " pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.684105 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4wx7p"] Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.686855 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.705455 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4wx7p"] Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.724163 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-utilities\") pod \"certified-operators-4wx7p\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.724265 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-catalog-content\") pod \"certified-operators-4wx7p\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.724320 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhjpt\" (UniqueName: \"kubernetes.io/projected/7803104f-80d2-42b1-a088-705c9e192661-kube-api-access-nhjpt\") pod \"certified-operators-4wx7p\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.773949 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtrrj\" (UniqueName: \"kubernetes.io/projected/50e2ad97-a729-4933-a82b-b7e4704b79e8-kube-api-access-gtrrj\") pod \"crc-debug-zshz2\" (UID: \"50e2ad97-a729-4933-a82b-b7e4704b79e8\") " pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.827442 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhjpt\" (UniqueName: \"kubernetes.io/projected/7803104f-80d2-42b1-a088-705c9e192661-kube-api-access-nhjpt\") pod \"certified-operators-4wx7p\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.827864 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-utilities\") pod \"certified-operators-4wx7p\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.827983 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-catalog-content\") pod \"certified-operators-4wx7p\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.828923 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-catalog-content\") pod \"certified-operators-4wx7p\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.828953 4753 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-utilities\") pod \"certified-operators-4wx7p\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.846092 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhjpt\" (UniqueName: \"kubernetes.io/projected/7803104f-80d2-42b1-a088-705c9e192661-kube-api-access-nhjpt\") pod \"certified-operators-4wx7p\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:14 crc kubenswrapper[4753]: I1205 18:12:14.990299 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:15 crc kubenswrapper[4753]: I1205 18:12:15.010333 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:15 crc kubenswrapper[4753]: W1205 18:12:15.035679 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50e2ad97_a729_4933_a82b_b7e4704b79e8.slice/crio-ff393c0555211c80b00e78f60d3718b386a62b2633dc0bba9af61d5d22e2c819 WatchSource:0}: Error finding container ff393c0555211c80b00e78f60d3718b386a62b2633dc0bba9af61d5d22e2c819: Status 404 returned error can't find the container with id ff393c0555211c80b00e78f60d3718b386a62b2633dc0bba9af61d5d22e2c819 Dec 05 18:12:15 crc kubenswrapper[4753]: I1205 18:12:15.668702 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4wx7p"] Dec 05 18:12:15 crc kubenswrapper[4753]: I1205 18:12:15.712339 4753 generic.go:334] "Generic (PLEG): container finished" podID="50e2ad97-a729-4933-a82b-b7e4704b79e8" containerID="d1afba6a31eca844c6665fbc6a5f9229a3b7a002887f73ba578e66e70f049892" exitCode=0 Dec 05 18:12:15 crc kubenswrapper[4753]: I1205 18:12:15.712411 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/crc-debug-zshz2" event={"ID":"50e2ad97-a729-4933-a82b-b7e4704b79e8","Type":"ContainerDied","Data":"d1afba6a31eca844c6665fbc6a5f9229a3b7a002887f73ba578e66e70f049892"} Dec 05 18:12:15 crc kubenswrapper[4753]: I1205 18:12:15.712441 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/crc-debug-zshz2" event={"ID":"50e2ad97-a729-4933-a82b-b7e4704b79e8","Type":"ContainerStarted","Data":"ff393c0555211c80b00e78f60d3718b386a62b2633dc0bba9af61d5d22e2c819"} Dec 05 18:12:15 crc kubenswrapper[4753]: I1205 18:12:15.714160 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx7p" event={"ID":"7803104f-80d2-42b1-a088-705c9e192661","Type":"ContainerStarted","Data":"67254ff9023deba616154aed60a8fa5c8dc3fa7c0f4e13813e237c8577047096"} Dec 05 18:12:15 crc kubenswrapper[4753]: I1205 18:12:15.767103 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-whd89/crc-debug-zshz2"] Dec 05 18:12:15 crc kubenswrapper[4753]: I1205 18:12:15.790365 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-whd89/crc-debug-zshz2"] Dec 05 18:12:16 crc kubenswrapper[4753]: I1205 18:12:16.720350 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 
18:12:16 crc kubenswrapper[4753]: E1205 18:12:16.721854 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:12:16 crc kubenswrapper[4753]: I1205 18:12:16.729428 4753 generic.go:334] "Generic (PLEG): container finished" podID="7803104f-80d2-42b1-a088-705c9e192661" containerID="67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961" exitCode=0 Dec 05 18:12:16 crc kubenswrapper[4753]: I1205 18:12:16.729560 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx7p" event={"ID":"7803104f-80d2-42b1-a088-705c9e192661","Type":"ContainerDied","Data":"67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961"} Dec 05 18:12:16 crc kubenswrapper[4753]: I1205 18:12:16.865899 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:16 crc kubenswrapper[4753]: I1205 18:12:16.987802 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtrrj\" (UniqueName: \"kubernetes.io/projected/50e2ad97-a729-4933-a82b-b7e4704b79e8-kube-api-access-gtrrj\") pod \"50e2ad97-a729-4933-a82b-b7e4704b79e8\" (UID: \"50e2ad97-a729-4933-a82b-b7e4704b79e8\") " Dec 05 18:12:16 crc kubenswrapper[4753]: I1205 18:12:16.987956 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e2ad97-a729-4933-a82b-b7e4704b79e8-host\") pod \"50e2ad97-a729-4933-a82b-b7e4704b79e8\" (UID: \"50e2ad97-a729-4933-a82b-b7e4704b79e8\") " Dec 05 18:12:16 crc kubenswrapper[4753]: I1205 18:12:16.988613 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/50e2ad97-a729-4933-a82b-b7e4704b79e8-host" (OuterVolumeSpecName: "host") pod "50e2ad97-a729-4933-a82b-b7e4704b79e8" (UID: "50e2ad97-a729-4933-a82b-b7e4704b79e8"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 18:12:16 crc kubenswrapper[4753]: I1205 18:12:16.993699 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50e2ad97-a729-4933-a82b-b7e4704b79e8-kube-api-access-gtrrj" (OuterVolumeSpecName: "kube-api-access-gtrrj") pod "50e2ad97-a729-4933-a82b-b7e4704b79e8" (UID: "50e2ad97-a729-4933-a82b-b7e4704b79e8"). InnerVolumeSpecName "kube-api-access-gtrrj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:12:17 crc kubenswrapper[4753]: I1205 18:12:17.090891 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtrrj\" (UniqueName: \"kubernetes.io/projected/50e2ad97-a729-4933-a82b-b7e4704b79e8-kube-api-access-gtrrj\") on node \"crc\" DevicePath \"\"" Dec 05 18:12:17 crc kubenswrapper[4753]: I1205 18:12:17.090923 4753 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/50e2ad97-a729-4933-a82b-b7e4704b79e8-host\") on node \"crc\" DevicePath \"\"" Dec 05 18:12:17 crc kubenswrapper[4753]: I1205 18:12:17.732733 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50e2ad97-a729-4933-a82b-b7e4704b79e8" path="/var/lib/kubelet/pods/50e2ad97-a729-4933-a82b-b7e4704b79e8/volumes" Dec 05 18:12:17 crc kubenswrapper[4753]: I1205 18:12:17.739022 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/crc-debug-zshz2" Dec 05 18:12:17 crc kubenswrapper[4753]: I1205 18:12:17.739038 4753 scope.go:117] "RemoveContainer" containerID="d1afba6a31eca844c6665fbc6a5f9229a3b7a002887f73ba578e66e70f049892" Dec 05 18:12:17 crc kubenswrapper[4753]: I1205 18:12:17.742166 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx7p" event={"ID":"7803104f-80d2-42b1-a088-705c9e192661","Type":"ContainerStarted","Data":"cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4"} Dec 05 18:12:18 crc kubenswrapper[4753]: I1205 18:12:18.754174 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx7p" event={"ID":"7803104f-80d2-42b1-a088-705c9e192661","Type":"ContainerDied","Data":"cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4"} Dec 05 18:12:18 crc kubenswrapper[4753]: I1205 18:12:18.754554 4753 generic.go:334] "Generic (PLEG): container finished" podID="7803104f-80d2-42b1-a088-705c9e192661" containerID="cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4" exitCode=0 Dec 05 18:12:20 crc kubenswrapper[4753]: I1205 18:12:20.803889 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx7p" event={"ID":"7803104f-80d2-42b1-a088-705c9e192661","Type":"ContainerStarted","Data":"e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227"} Dec 05 18:12:20 crc kubenswrapper[4753]: I1205 18:12:20.826115 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4wx7p" podStartSLOduration=4.348278831 podStartE2EDuration="6.826097052s" podCreationTimestamp="2025-12-05 18:12:14 +0000 UTC" firstStartedPulling="2025-12-05 18:12:16.733974858 +0000 UTC m=+4075.237081864" lastFinishedPulling="2025-12-05 18:12:19.211793079 +0000 UTC m=+4077.714900085" observedRunningTime="2025-12-05 18:12:20.820060771 +0000 UTC m=+4079.323167787" watchObservedRunningTime="2025-12-05 18:12:20.826097052 +0000 UTC m=+4079.329204058" Dec 05 18:12:25 crc kubenswrapper[4753]: I1205 18:12:25.011230 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:25 crc kubenswrapper[4753]: I1205 18:12:25.011697 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:25 crc kubenswrapper[4753]: I1205 18:12:25.066529 4753 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:25 crc kubenswrapper[4753]: I1205 18:12:25.905428 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:25 crc kubenswrapper[4753]: I1205 18:12:25.974315 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4wx7p"] Dec 05 18:12:27 crc kubenswrapper[4753]: I1205 18:12:27.863261 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4wx7p" podUID="7803104f-80d2-42b1-a088-705c9e192661" containerName="registry-server" containerID="cri-o://e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227" gracePeriod=2 Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.482669 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.653569 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nhjpt\" (UniqueName: \"kubernetes.io/projected/7803104f-80d2-42b1-a088-705c9e192661-kube-api-access-nhjpt\") pod \"7803104f-80d2-42b1-a088-705c9e192661\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.653657 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-catalog-content\") pod \"7803104f-80d2-42b1-a088-705c9e192661\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.653895 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-utilities\") pod \"7803104f-80d2-42b1-a088-705c9e192661\" (UID: \"7803104f-80d2-42b1-a088-705c9e192661\") " Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.655094 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-utilities" (OuterVolumeSpecName: "utilities") pod "7803104f-80d2-42b1-a088-705c9e192661" (UID: "7803104f-80d2-42b1-a088-705c9e192661"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.661867 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7803104f-80d2-42b1-a088-705c9e192661-kube-api-access-nhjpt" (OuterVolumeSpecName: "kube-api-access-nhjpt") pod "7803104f-80d2-42b1-a088-705c9e192661" (UID: "7803104f-80d2-42b1-a088-705c9e192661"). InnerVolumeSpecName "kube-api-access-nhjpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.717043 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7803104f-80d2-42b1-a088-705c9e192661" (UID: "7803104f-80d2-42b1-a088-705c9e192661"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.757251 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nhjpt\" (UniqueName: \"kubernetes.io/projected/7803104f-80d2-42b1-a088-705c9e192661-kube-api-access-nhjpt\") on node \"crc\" DevicePath \"\"" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.757300 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.757329 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7803104f-80d2-42b1-a088-705c9e192661-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.874357 4753 generic.go:334] "Generic (PLEG): container finished" podID="7803104f-80d2-42b1-a088-705c9e192661" containerID="e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227" exitCode=0 Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.874408 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx7p" event={"ID":"7803104f-80d2-42b1-a088-705c9e192661","Type":"ContainerDied","Data":"e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227"} Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.874426 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4wx7p" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.874453 4753 scope.go:117] "RemoveContainer" containerID="e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.874441 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4wx7p" event={"ID":"7803104f-80d2-42b1-a088-705c9e192661","Type":"ContainerDied","Data":"67254ff9023deba616154aed60a8fa5c8dc3fa7c0f4e13813e237c8577047096"} Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.898788 4753 scope.go:117] "RemoveContainer" containerID="cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.913866 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4wx7p"] Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.918014 4753 scope.go:117] "RemoveContainer" containerID="67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.923597 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4wx7p"] Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.971461 4753 scope.go:117] "RemoveContainer" containerID="e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227" Dec 05 18:12:28 crc kubenswrapper[4753]: E1205 18:12:28.971982 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227\": container with ID starting with e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227 not found: ID does not exist" containerID="e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227" Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.972023 
Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.972023 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227"} err="failed to get container status \"e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227\": rpc error: code = NotFound desc = could not find container \"e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227\": container with ID starting with e24d3a8f467d834aec1a3f83fa6ee2a2e39ed7dfbd143d064c1dab08ec3bb227 not found: ID does not exist"
Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.972054 4753 scope.go:117] "RemoveContainer" containerID="cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4"
Dec 05 18:12:28 crc kubenswrapper[4753]: E1205 18:12:28.972545 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4\": container with ID starting with cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4 not found: ID does not exist" containerID="cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4"
Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.972586 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4"} err="failed to get container status \"cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4\": rpc error: code = NotFound desc = could not find container \"cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4\": container with ID starting with cdabeb7fd8873fa27d395c3c888177fb91166b666e1e464c515942b212fd6dc4 not found: ID does not exist"
Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.972616 4753 scope.go:117] "RemoveContainer" containerID="67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961"
Dec 05 18:12:28 crc kubenswrapper[4753]: E1205 18:12:28.973012 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961\": container with ID starting with 67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961 not found: ID does not exist" containerID="67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961"
Dec 05 18:12:28 crc kubenswrapper[4753]: I1205 18:12:28.973043 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961"} err="failed to get container status \"67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961\": rpc error: code = NotFound desc = could not find container \"67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961\": container with ID starting with 67dc6da68091c798913f6d6129fd21c364c30e192b02c5d847b8ec687728e961 not found: ID does not exist"
Dec 05 18:12:29 crc kubenswrapper[4753]: I1205 18:12:29.739491 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7803104f-80d2-42b1-a088-705c9e192661" path="/var/lib/kubelet/pods/7803104f-80d2-42b1-a088-705c9e192661/volumes"
Dec 05 18:12:30 crc kubenswrapper[4753]: I1205 18:12:30.720332 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469"
Dec 05 18:12:30 crc kubenswrapper[4753]: E1205 18:12:30.720914 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.389429 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-8brwm"]
Dec 05 18:12:35 crc kubenswrapper[4753]: E1205 18:12:35.392919 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50e2ad97-a729-4933-a82b-b7e4704b79e8" containerName="container-00"
Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.392956 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="50e2ad97-a729-4933-a82b-b7e4704b79e8" containerName="container-00"
Dec 05 18:12:35 crc kubenswrapper[4753]: E1205 18:12:35.392989 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7803104f-80d2-42b1-a088-705c9e192661" containerName="extract-utilities"
Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.393002 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7803104f-80d2-42b1-a088-705c9e192661" containerName="extract-utilities"
Dec 05 18:12:35 crc kubenswrapper[4753]: E1205 18:12:35.393049 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7803104f-80d2-42b1-a088-705c9e192661" containerName="extract-content"
Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.393061 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7803104f-80d2-42b1-a088-705c9e192661" containerName="extract-content"
Dec 05 18:12:35 crc kubenswrapper[4753]: E1205 18:12:35.393085 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7803104f-80d2-42b1-a088-705c9e192661" containerName="registry-server"
Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.393096 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7803104f-80d2-42b1-a088-705c9e192661" containerName="registry-server"
Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.393631 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="50e2ad97-a729-4933-a82b-b7e4704b79e8" containerName="container-00"
Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.393747 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="7803104f-80d2-42b1-a088-705c9e192661" containerName="registry-server"
Need to start a new one" pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.401177 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8brwm"] Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.504681 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5t9np\" (UniqueName: \"kubernetes.io/projected/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-kube-api-access-5t9np\") pod \"redhat-operators-8brwm\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") " pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.504805 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-utilities\") pod \"redhat-operators-8brwm\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") " pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.504862 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-catalog-content\") pod \"redhat-operators-8brwm\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") " pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.606479 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5t9np\" (UniqueName: \"kubernetes.io/projected/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-kube-api-access-5t9np\") pod \"redhat-operators-8brwm\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") " pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.606611 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-utilities\") pod \"redhat-operators-8brwm\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") " pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.606674 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-catalog-content\") pod \"redhat-operators-8brwm\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") " pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.607087 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-utilities\") pod \"redhat-operators-8brwm\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") " pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.607269 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-catalog-content\") pod \"redhat-operators-8brwm\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") " pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.664163 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5t9np\" (UniqueName: \"kubernetes.io/projected/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-kube-api-access-5t9np\") pod \"redhat-operators-8brwm\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") " pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:35 crc kubenswrapper[4753]: I1205 18:12:35.729354 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:36 crc kubenswrapper[4753]: I1205 18:12:36.309047 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-8brwm"] Dec 05 18:12:36 crc kubenswrapper[4753]: I1205 18:12:36.957890 4753 generic.go:334] "Generic (PLEG): container finished" podID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerID="8630e944a7ce9fb1232bfc82744177b5dee1b4b6e73b1c9c7b6df6ac61ac875a" exitCode=0 Dec 05 18:12:36 crc kubenswrapper[4753]: I1205 18:12:36.958010 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8brwm" event={"ID":"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e","Type":"ContainerDied","Data":"8630e944a7ce9fb1232bfc82744177b5dee1b4b6e73b1c9c7b6df6ac61ac875a"} Dec 05 18:12:36 crc kubenswrapper[4753]: I1205 18:12:36.958268 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8brwm" event={"ID":"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e","Type":"ContainerStarted","Data":"6f45a0d99ffcf6dd67adda305891662434e18089813ac7dc3ce17a9306f51226"} Dec 05 18:12:37 crc kubenswrapper[4753]: I1205 18:12:37.972001 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8brwm" event={"ID":"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e","Type":"ContainerStarted","Data":"e76bdeb8a9c5bd1c4340b76c544ffd831e0c9a5b7464c4115ce9e1bb6150848b"} Dec 05 18:12:41 crc kubenswrapper[4753]: I1205 18:12:41.003321 4753 generic.go:334] "Generic (PLEG): container finished" podID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerID="e76bdeb8a9c5bd1c4340b76c544ffd831e0c9a5b7464c4115ce9e1bb6150848b" exitCode=0 Dec 05 18:12:41 crc kubenswrapper[4753]: I1205 18:12:41.003365 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8brwm" event={"ID":"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e","Type":"ContainerDied","Data":"e76bdeb8a9c5bd1c4340b76c544ffd831e0c9a5b7464c4115ce9e1bb6150848b"} Dec 05 18:12:42 crc kubenswrapper[4753]: I1205 18:12:42.015245 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8brwm" event={"ID":"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e","Type":"ContainerStarted","Data":"ec602c9cc9cd36976a654e574de2724cd126c5fecdd224d248346595b91fb238"} Dec 05 18:12:42 crc kubenswrapper[4753]: I1205 18:12:42.048852 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-8brwm" podStartSLOduration=2.59271973 podStartE2EDuration="7.048832467s" podCreationTimestamp="2025-12-05 18:12:35 +0000 UTC" firstStartedPulling="2025-12-05 18:12:36.960950192 +0000 UTC m=+4095.464057188" lastFinishedPulling="2025-12-05 18:12:41.417062919 +0000 UTC m=+4099.920169925" observedRunningTime="2025-12-05 18:12:42.038204115 +0000 UTC m=+4100.541311141" watchObservedRunningTime="2025-12-05 18:12:42.048832467 +0000 UTC m=+4100.551939473" Dec 05 18:12:44 crc kubenswrapper[4753]: I1205 18:12:44.720665 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 
Dec 05 18:12:44 crc kubenswrapper[4753]: E1205 18:12:44.721297 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:12:45 crc kubenswrapper[4753]: I1205 18:12:45.735335 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-8brwm"
Dec 05 18:12:45 crc kubenswrapper[4753]: I1205 18:12:45.735717 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-8brwm"
Dec 05 18:12:46 crc kubenswrapper[4753]: I1205 18:12:46.304787 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f409c39c-6d5a-4950-bd92-2ab8a26ad831/init-config-reloader/0.log"
Dec 05 18:12:46 crc kubenswrapper[4753]: I1205 18:12:46.500218 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f409c39c-6d5a-4950-bd92-2ab8a26ad831/alertmanager/0.log"
Dec 05 18:12:46 crc kubenswrapper[4753]: I1205 18:12:46.515726 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f409c39c-6d5a-4950-bd92-2ab8a26ad831/config-reloader/0.log"
Dec 05 18:12:46 crc kubenswrapper[4753]: I1205 18:12:46.579906 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f409c39c-6d5a-4950-bd92-2ab8a26ad831/init-config-reloader/0.log"
Dec 05 18:12:46 crc kubenswrapper[4753]: I1205 18:12:46.713480 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f996df5b6-dlvm2_2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013/barbican-api/0.log"
Dec 05 18:12:46 crc kubenswrapper[4753]: I1205 18:12:46.733665 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f996df5b6-dlvm2_2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013/barbican-api-log/0.log"
Dec 05 18:12:46 crc kubenswrapper[4753]: I1205 18:12:46.816456 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58b544895d-h2wcr_c272889e-62f7-4ce2-8a38-e15945d984d9/barbican-keystone-listener/0.log"
Dec 05 18:12:47 crc kubenswrapper[4753]: I1205 18:12:47.048446 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58b544895d-h2wcr_c272889e-62f7-4ce2-8a38-e15945d984d9/barbican-keystone-listener-log/0.log"
Dec 05 18:12:47 crc kubenswrapper[4753]: I1205 18:12:47.113268 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-69b574cd87-g8xmw_491b4a49-e02d-41a2-b783-b3dddbedbc57/barbican-worker-log/0.log"
Dec 05 18:12:47 crc kubenswrapper[4753]: I1205 18:12:47.116633 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-8brwm" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerName="registry-server" probeResult="failure" output=<
Dec 05 18:12:47 crc kubenswrapper[4753]: timeout: failed to connect service ":50051" within 1s
Dec 05 18:12:47 crc kubenswrapper[4753]: >
Dec 05 18:12:47 crc kubenswrapper[4753]: I1205 18:12:47.196826 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-69b574cd87-g8xmw_491b4a49-e02d-41a2-b783-b3dddbedbc57/barbican-worker/0.log"
Dec 05 18:12:47 crc kubenswrapper[4753]: I1205 18:12:47.394274 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn_a40f9ec8-5379-4355-b524-fed440fdf2d6/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:12:47 crc kubenswrapper[4753]: I1205 18:12:47.453003 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7/ceilometer-central-agent/0.log"
Dec 05 18:12:47 crc kubenswrapper[4753]: I1205 18:12:47.540040 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7/ceilometer-notification-agent/0.log"
Dec 05 18:12:48 crc kubenswrapper[4753]: I1205 18:12:48.294071 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7/sg-core/0.log"
Dec 05 18:12:48 crc kubenswrapper[4753]: I1205 18:12:48.301504 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7/proxy-httpd/0.log"
Dec 05 18:12:48 crc kubenswrapper[4753]: I1205 18:12:48.333177 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8adad7e9-de7d-440a-9ac9-55882e2fd944/cinder-api/0.log"
Dec 05 18:12:48 crc kubenswrapper[4753]: I1205 18:12:48.487509 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8adad7e9-de7d-440a-9ac9-55882e2fd944/cinder-api-log/0.log"
Dec 05 18:12:48 crc kubenswrapper[4753]: I1205 18:12:48.540112 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_802deda2-7602-46a4-b4d0-25cd167bbdf2/probe/0.log"
Dec 05 18:12:48 crc kubenswrapper[4753]: I1205 18:12:48.544311 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_802deda2-7602-46a4-b4d0-25cd167bbdf2/cinder-scheduler/0.log"
Dec 05 18:12:48 crc kubenswrapper[4753]: I1205 18:12:48.755689 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_5f627cef-fbe5-40ef-beeb-e3b08861c449/cloudkitty-api-log/0.log"
Dec 05 18:12:48 crc kubenswrapper[4753]: I1205 18:12:48.855562 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_5f627cef-fbe5-40ef-beeb-e3b08861c449/cloudkitty-api/0.log"
Dec 05 18:12:48 crc kubenswrapper[4753]: I1205 18:12:48.970235 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b/loki-compactor/0.log"
Dec 05 18:12:49 crc kubenswrapper[4753]: I1205 18:12:49.074699 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-664b687b54-jj67d_16d583e9-9ea0-4222-a38a-f8e1be33cdae/loki-distributor/0.log"
Dec 05 18:12:49 crc kubenswrapper[4753]: I1205 18:12:49.200286 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-8prh2_389b7205-589e-4027-ae02-ba2287c7e0ed/gateway/0.log"
Dec 05 18:12:49 crc kubenswrapper[4753]: I1205 18:12:49.382938 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-pqqjw_8f82a8e6-b07e-4bf9-801e-04c1f96fe703/gateway/0.log"
parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_93568770-efee-4906-b491-17d0664bfa8b/loki-index-gateway/0.log" Dec 05 18:12:49 crc kubenswrapper[4753]: I1205 18:12:49.801763 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_227cc7e4-602f-4c1e-afa7-0e106d3f505f/loki-ingester/0.log" Dec 05 18:12:49 crc kubenswrapper[4753]: I1205 18:12:49.973845 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk_10df4aa4-d920-45d4-9592-72c32d59c312/loki-query-frontend/0.log" Dec 05 18:12:50 crc kubenswrapper[4753]: I1205 18:12:50.259567 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m_9e4d29b1-9d77-4744-85ed-e6882651cea9/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:50 crc kubenswrapper[4753]: I1205 18:12:50.548378 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx_476af6d7-20f2-4345-96ad-219ab22e904b/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:50 crc kubenswrapper[4753]: I1205 18:12:50.575646 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-nfqd8_d81db6a5-e580-4e70-92bb-437f1c03f5b4/init/0.log" Dec 05 18:12:50 crc kubenswrapper[4753]: I1205 18:12:50.728312 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-5467947bf7-7l6pk_89ac2139-b38d-40b1-939d-b23748c819d0/loki-querier/0.log" Dec 05 18:12:50 crc kubenswrapper[4753]: I1205 18:12:50.879663 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-nfqd8_d81db6a5-e580-4e70-92bb-437f1c03f5b4/init/0.log" Dec 05 18:12:50 crc kubenswrapper[4753]: I1205 18:12:50.913706 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-nfqd8_d81db6a5-e580-4e70-92bb-437f1c03f5b4/dnsmasq-dns/0.log" Dec 05 18:12:51 crc kubenswrapper[4753]: I1205 18:12:51.013764 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-2drf7_825da353-e856-45ac-9cff-027d1f16663a/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:51 crc kubenswrapper[4753]: I1205 18:12:51.159527 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_187c344a-5fdf-47db-b103-de9458e6a58a/glance-log/0.log" Dec 05 18:12:51 crc kubenswrapper[4753]: I1205 18:12:51.271034 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_187c344a-5fdf-47db-b103-de9458e6a58a/glance-httpd/0.log" Dec 05 18:12:51 crc kubenswrapper[4753]: I1205 18:12:51.390396 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_33797bcf-e2f2-4a28-8148-3e027fc342d8/glance-log/0.log" Dec 05 18:12:51 crc kubenswrapper[4753]: I1205 18:12:51.419877 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_33797bcf-e2f2-4a28-8148-3e027fc342d8/glance-httpd/0.log" Dec 05 18:12:51 crc kubenswrapper[4753]: I1205 18:12:51.664305 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-2c89w_3e9951b8-30f1-4aea-947e-d69fcb39bdcf/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:51 crc kubenswrapper[4753]: I1205 18:12:51.677766 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-7gbc5_5678941d-59cd-487e-82a4-f2cf0bf528a7/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:51 crc kubenswrapper[4753]: I1205 18:12:51.955104 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415961-8rgrd_487332b8-9414-4d94-b52e-9deb57aaf729/keystone-cron/0.log" Dec 05 18:12:52 crc kubenswrapper[4753]: I1205 18:12:52.153595 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_37b1ed0b-5977-4294-b5c4-0d9d0abd6520/kube-state-metrics/0.log" Dec 05 18:12:52 crc kubenswrapper[4753]: I1205 18:12:52.285780 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-578b67ddb8-fsb8m_4e39f06b-3be0-4d99-a8b8-627de083ff81/keystone-api/0.log" Dec 05 18:12:52 crc kubenswrapper[4753]: I1205 18:12:52.447574 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z_d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:52 crc kubenswrapper[4753]: I1205 18:12:52.988327 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d4f9bcbff-wqwgp_a2d7ec08-5ff8-4470-a4a7-2d830d3f5261/neutron-httpd/0.log" Dec 05 18:12:53 crc kubenswrapper[4753]: I1205 18:12:53.027717 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d4f9bcbff-wqwgp_a2d7ec08-5ff8-4470-a4a7-2d830d3f5261/neutron-api/0.log" Dec 05 18:12:53 crc kubenswrapper[4753]: I1205 18:12:53.247428 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf_ea5f795b-6ef0-4281-a619-1a89b547e436/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:53 crc kubenswrapper[4753]: I1205 18:12:53.379437 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_11174666-3e97-47ce-90f5-55ee37dddf75/cloudkitty-proc/0.log" Dec 05 18:12:53 crc kubenswrapper[4753]: I1205 18:12:53.848036 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_0295546c-bece-4b19-ae35-8188830dab3b/nova-cell0-conductor-conductor/0.log" Dec 05 18:12:53 crc kubenswrapper[4753]: I1205 18:12:53.871142 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3aaf728a-8d40-4b45-9f79-a5bb36ee9a57/nova-api-log/0.log" Dec 05 18:12:53 crc kubenswrapper[4753]: I1205 18:12:53.876976 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3aaf728a-8d40-4b45-9f79-a5bb36ee9a57/nova-api-api/0.log" Dec 05 18:12:54 crc kubenswrapper[4753]: I1205 18:12:54.211994 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_059b378d-55a3-4652-96ac-804b19815e8d/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 18:12:54 crc kubenswrapper[4753]: I1205 18:12:54.214882 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_696fed1a-38d3-459f-b08b-128b8d41d472/nova-cell1-conductor-conductor/0.log" Dec 05 18:12:54 crc kubenswrapper[4753]: I1205 18:12:54.323413 4753 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-tcnxd_352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:54 crc kubenswrapper[4753]: I1205 18:12:54.680138 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f56a1bea-c258-4ed6-a43c-2d006aaa4a23/nova-metadata-log/0.log" Dec 05 18:12:54 crc kubenswrapper[4753]: I1205 18:12:54.933009 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_1f6bb960-f1d1-413e-bd11-aa0d1251135f/nova-scheduler-scheduler/0.log" Dec 05 18:12:55 crc kubenswrapper[4753]: I1205 18:12:55.016650 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c8d8a7a-38bd-49d9-8f25-5495c32462bc/mysql-bootstrap/0.log" Dec 05 18:12:55 crc kubenswrapper[4753]: I1205 18:12:55.162090 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c8d8a7a-38bd-49d9-8f25-5495c32462bc/mysql-bootstrap/0.log" Dec 05 18:12:55 crc kubenswrapper[4753]: I1205 18:12:55.227263 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c8d8a7a-38bd-49d9-8f25-5495c32462bc/galera/0.log" Dec 05 18:12:55 crc kubenswrapper[4753]: I1205 18:12:55.391822 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9df69769-e394-444f-b6e2-e788e989fe92/mysql-bootstrap/0.log" Dec 05 18:12:55 crc kubenswrapper[4753]: I1205 18:12:55.580449 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9df69769-e394-444f-b6e2-e788e989fe92/mysql-bootstrap/0.log" Dec 05 18:12:55 crc kubenswrapper[4753]: I1205 18:12:55.647293 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9df69769-e394-444f-b6e2-e788e989fe92/galera/0.log" Dec 05 18:12:55 crc kubenswrapper[4753]: I1205 18:12:55.655316 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f56a1bea-c258-4ed6-a43c-2d006aaa4a23/nova-metadata-metadata/0.log" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.107239 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.151638 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_1917fbcd-3d32-4ceb-aeab-1119aa3d4771/openstackclient/0.log" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.169609 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.233670 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8m7cw_9f33836c-96c6-4da3-b2d6-e9c12631f2b4/ovn-controller/0.log" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.352341 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8brwm"] Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.383322 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-sp59x_85fb40be-ac2e-404f-912b-2831ae6eb795/openstack-network-exporter/0.log" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.461818 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-controller-ovs-qhphp_284db7f3-ca89-447a-90eb-487d43e49f7d/ovsdb-server-init/0.log" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.689287 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qhphp_284db7f3-ca89-447a-90eb-487d43e49f7d/ovsdb-server-init/0.log" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.706354 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qhphp_284db7f3-ca89-447a-90eb-487d43e49f7d/ovs-vswitchd/0.log" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.709975 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qhphp_284db7f3-ca89-447a-90eb-487d43e49f7d/ovsdb-server/0.log" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.916031 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-kx88v_e2f8ca40-16d5-4a17-80a1-f5bf12f92d71/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:56 crc kubenswrapper[4753]: I1205 18:12:56.971169 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a2c3f794-ac1f-4115-bf82-a43f3a487332/openstack-network-exporter/0.log" Dec 05 18:12:57 crc kubenswrapper[4753]: I1205 18:12:57.004849 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a2c3f794-ac1f-4115-bf82-a43f3a487332/ovn-northd/0.log" Dec 05 18:12:57 crc kubenswrapper[4753]: I1205 18:12:57.190365 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f8c13e8e-fec9-49e2-a2b0-5ca0473d2469/openstack-network-exporter/0.log" Dec 05 18:12:57 crc kubenswrapper[4753]: I1205 18:12:57.215668 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-8brwm" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerName="registry-server" containerID="cri-o://ec602c9cc9cd36976a654e574de2724cd126c5fecdd224d248346595b91fb238" gracePeriod=2 Dec 05 18:12:57 crc kubenswrapper[4753]: I1205 18:12:57.265943 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f8c13e8e-fec9-49e2-a2b0-5ca0473d2469/ovsdbserver-nb/0.log" Dec 05 18:12:57 crc kubenswrapper[4753]: I1205 18:12:57.412587 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_09e6b220-0a70-4359-93f4-4450b2e458c8/openstack-network-exporter/0.log" Dec 05 18:12:57 crc kubenswrapper[4753]: I1205 18:12:57.417052 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_09e6b220-0a70-4359-93f4-4450b2e458c8/ovsdbserver-sb/0.log" Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.238032 4753 generic.go:334] "Generic (PLEG): container finished" podID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerID="ec602c9cc9cd36976a654e574de2724cd126c5fecdd224d248346595b91fb238" exitCode=0 Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.238332 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8brwm" event={"ID":"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e","Type":"ContainerDied","Data":"ec602c9cc9cd36976a654e574de2724cd126c5fecdd224d248346595b91fb238"} Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.394475 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-56466dc556-cvwd4_2b9f9c99-c7ba-4689-8218-f61fecf29867/placement-log/0.log" Dec 05 18:12:58 crc kubenswrapper[4753]: 
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.472562 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-56466dc556-cvwd4_2b9f9c99-c7ba-4689-8218-f61fecf29867/placement-api/0.log"
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.472583 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-8brwm"
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.501571 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/init-config-reloader/0.log"
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.602103 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5t9np\" (UniqueName: \"kubernetes.io/projected/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-kube-api-access-5t9np\") pod \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") "
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.602279 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-utilities\") pod \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") "
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.602381 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-catalog-content\") pod \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\" (UID: \"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e\") "
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.605422 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-utilities" (OuterVolumeSpecName: "utilities") pod "7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" (UID: "7c12b0dd-51a3-473d-aabf-4d9fb17caf2e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.610030 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-kube-api-access-5t9np" (OuterVolumeSpecName: "kube-api-access-5t9np") pod "7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" (UID: "7c12b0dd-51a3-473d-aabf-4d9fb17caf2e"). InnerVolumeSpecName "kube-api-access-5t9np". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.704486 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.704518 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5t9np\" (UniqueName: \"kubernetes.io/projected/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-kube-api-access-5t9np\") on node \"crc\" DevicePath \"\""
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.720078 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" (UID: "7c12b0dd-51a3-473d-aabf-4d9fb17caf2e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.737919 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/init-config-reloader/0.log"
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.761363 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/config-reloader/0.log"
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.800387 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/thanos-sidecar/0.log"
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.805894 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 18:12:58 crc kubenswrapper[4753]: I1205 18:12:58.805991 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/prometheus/0.log"
Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.048049 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7a1f2600-cf85-45c5-8263-89810b0ba7ce/setup-container/0.log"
Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.253312 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-8brwm" event={"ID":"7c12b0dd-51a3-473d-aabf-4d9fb17caf2e","Type":"ContainerDied","Data":"6f45a0d99ffcf6dd67adda305891662434e18089813ac7dc3ce17a9306f51226"}
Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.253369 4753 scope.go:117] "RemoveContainer" containerID="ec602c9cc9cd36976a654e574de2724cd126c5fecdd224d248346595b91fb238"
Need to start a new one" pod="openshift-marketplace/redhat-operators-8brwm" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.288895 4753 scope.go:117] "RemoveContainer" containerID="e76bdeb8a9c5bd1c4340b76c544ffd831e0c9a5b7464c4115ce9e1bb6150848b" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.291433 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-8brwm"] Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.302759 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-8brwm"] Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.322963 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7a1f2600-cf85-45c5-8263-89810b0ba7ce/rabbitmq/0.log" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.333906 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2bbb2b1a-5cf9-497c-9471-13ba1314167b/setup-container/0.log" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.334006 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7a1f2600-cf85-45c5-8263-89810b0ba7ce/setup-container/0.log" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.353979 4753 scope.go:117] "RemoveContainer" containerID="8630e944a7ce9fb1232bfc82744177b5dee1b4b6e73b1c9c7b6df6ac61ac875a" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.683332 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2bbb2b1a-5cf9-497c-9471-13ba1314167b/rabbitmq/0.log" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.686209 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp_87333285-bec9-4c68-b2f7-307fee899fe4/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.689260 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2bbb2b1a-5cf9-497c-9471-13ba1314167b/setup-container/0.log" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.726690 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:12:59 crc kubenswrapper[4753]: E1205 18:12:59.730383 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.734260 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" path="/var/lib/kubelet/pods/7c12b0dd-51a3-473d-aabf-4d9fb17caf2e/volumes" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.904829 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-k2657_228dc9b7-ee9e-48b8-bf86-f4265863f94a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:12:59 crc kubenswrapper[4753]: I1205 18:12:59.983864 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2_d4b7df21-a189-41c8-9e93-c43d0eb552c5/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:13:00 crc kubenswrapper[4753]: I1205 18:13:00.169312 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-6z7rw_21886288-cce7-4e89-8c64-e4f06623f8f3/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:13:00 crc kubenswrapper[4753]: I1205 18:13:00.363993 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-lwtd6_7e1f8581-434d-4c92-aa86-f76aa242a2e2/ssh-known-hosts-edpm-deployment/0.log" Dec 05 18:13:00 crc kubenswrapper[4753]: I1205 18:13:00.631344 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6d854f58c-mvlpx_fc5495c5-a8e0-46c4-82c8-be930b187322/proxy-server/0.log" Dec 05 18:13:00 crc kubenswrapper[4753]: I1205 18:13:00.663721 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6d854f58c-mvlpx_fc5495c5-a8e0-46c4-82c8-be930b187322/proxy-httpd/0.log" Dec 05 18:13:00 crc kubenswrapper[4753]: I1205 18:13:00.772543 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-z4g8x_3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2/swift-ring-rebalance/0.log" Dec 05 18:13:00 crc kubenswrapper[4753]: I1205 18:13:00.913762 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/account-auditor/0.log" Dec 05 18:13:00 crc kubenswrapper[4753]: I1205 18:13:00.939828 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/account-reaper/0.log" Dec 05 18:13:00 crc kubenswrapper[4753]: I1205 18:13:00.998209 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/account-replicator/0.log" Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.129410 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/container-auditor/0.log" Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.160091 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/account-server/0.log" Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.258083 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/container-replicator/0.log" Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.291920 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/container-server/0.log" Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.359777 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/container-updater/0.log" Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.399243 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-auditor/0.log" Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.536465 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-expirer/0.log" Dec 05 18:13:01 crc kubenswrapper[4753]: 
Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.589028 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-replicator/0.log"
Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.647399 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-server/0.log"
Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.710935 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-updater/0.log"
Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.788505 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/rsync/0.log"
Dec 05 18:13:01 crc kubenswrapper[4753]: I1205 18:13:01.832031 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/swift-recon-cron/0.log"
Dec 05 18:13:02 crc kubenswrapper[4753]: I1205 18:13:02.016028 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl_9b316eb5-2fa8-4582-afdd-0b94dfe44a6f/telemetry-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:13:02 crc kubenswrapper[4753]: I1205 18:13:02.097994 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_230fdd26-f37e-4a32-a261-efdb39dc8de2/tempest-tests-tempest-tests-runner/0.log"
Dec 05 18:13:02 crc kubenswrapper[4753]: I1205 18:13:02.279826 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_2c8ba284-0ce6-4fd1-af48-9953b5b14d55/test-operator-logs-container/0.log"
Dec 05 18:13:02 crc kubenswrapper[4753]: I1205 18:13:02.386748 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w_c239ce8f-d247-46bb-889b-914ff6f8ab64/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:13:06 crc kubenswrapper[4753]: I1205 18:13:06.058790 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_8978936d-d71e-4840-9cc4-666746ebeecf/memcached/0.log"
Dec 05 18:13:14 crc kubenswrapper[4753]: I1205 18:13:14.720275 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469"
Dec 05 18:13:14 crc kubenswrapper[4753]: E1205 18:13:14.722599 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.772050 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-b9z4v"]
Dec 05 18:13:23 crc kubenswrapper[4753]: E1205 18:13:23.773139 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerName="registry-server"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.773175 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerName="registry-server"
Dec 05 18:13:23 crc kubenswrapper[4753]: E1205 18:13:23.773206 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerName="extract-content"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.773215 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerName="extract-content"
Dec 05 18:13:23 crc kubenswrapper[4753]: E1205 18:13:23.773233 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerName="extract-utilities"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.773246 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerName="extract-utilities"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.773512 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c12b0dd-51a3-473d-aabf-4d9fb17caf2e" containerName="registry-server"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.775783 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b9z4v"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.792672 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b9z4v"]
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.864862 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-catalog-content\") pod \"community-operators-b9z4v\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " pod="openshift-marketplace/community-operators-b9z4v"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.865054 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-utilities\") pod \"community-operators-b9z4v\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " pod="openshift-marketplace/community-operators-b9z4v"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.865138 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvtzz\" (UniqueName: \"kubernetes.io/projected/55317d0c-fc49-40d3-9d0d-02dbec190e32-kube-api-access-kvtzz\") pod \"community-operators-b9z4v\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " pod="openshift-marketplace/community-operators-b9z4v"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.967263 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-catalog-content\") pod \"community-operators-b9z4v\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " pod="openshift-marketplace/community-operators-b9z4v"
Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.967605 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-utilities\") pod \"community-operators-b9z4v\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " pod="openshift-marketplace/community-operators-b9z4v"
\"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-catalog-content\") pod \"community-operators-b9z4v\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.967793 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvtzz\" (UniqueName: \"kubernetes.io/projected/55317d0c-fc49-40d3-9d0d-02dbec190e32-kube-api-access-kvtzz\") pod \"community-operators-b9z4v\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.968050 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-utilities\") pod \"community-operators-b9z4v\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:23 crc kubenswrapper[4753]: I1205 18:13:23.989010 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvtzz\" (UniqueName: \"kubernetes.io/projected/55317d0c-fc49-40d3-9d0d-02dbec190e32-kube-api-access-kvtzz\") pod \"community-operators-b9z4v\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:24 crc kubenswrapper[4753]: I1205 18:13:24.093838 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:24 crc kubenswrapper[4753]: I1205 18:13:24.620874 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-b9z4v"] Dec 05 18:13:25 crc kubenswrapper[4753]: I1205 18:13:25.517903 4753 generic.go:334] "Generic (PLEG): container finished" podID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerID="2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74" exitCode=0 Dec 05 18:13:25 crc kubenswrapper[4753]: I1205 18:13:25.518544 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9z4v" event={"ID":"55317d0c-fc49-40d3-9d0d-02dbec190e32","Type":"ContainerDied","Data":"2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74"} Dec 05 18:13:25 crc kubenswrapper[4753]: I1205 18:13:25.519394 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9z4v" event={"ID":"55317d0c-fc49-40d3-9d0d-02dbec190e32","Type":"ContainerStarted","Data":"a044b52bac689bc09f8a71b6d0d712239f418c4f0d96b1501fafe9591dfefa94"} Dec 05 18:13:26 crc kubenswrapper[4753]: I1205 18:13:26.528832 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9z4v" event={"ID":"55317d0c-fc49-40d3-9d0d-02dbec190e32","Type":"ContainerStarted","Data":"22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199"} Dec 05 18:13:27 crc kubenswrapper[4753]: I1205 18:13:27.539908 4753 generic.go:334] "Generic (PLEG): container finished" podID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerID="22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199" exitCode=0 Dec 05 18:13:27 crc kubenswrapper[4753]: I1205 18:13:27.539986 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9z4v" 
event={"ID":"55317d0c-fc49-40d3-9d0d-02dbec190e32","Type":"ContainerDied","Data":"22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199"} Dec 05 18:13:28 crc kubenswrapper[4753]: I1205 18:13:28.551056 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9z4v" event={"ID":"55317d0c-fc49-40d3-9d0d-02dbec190e32","Type":"ContainerStarted","Data":"31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4"} Dec 05 18:13:28 crc kubenswrapper[4753]: I1205 18:13:28.567106 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-b9z4v" podStartSLOduration=3.182724014 podStartE2EDuration="5.567088596s" podCreationTimestamp="2025-12-05 18:13:23 +0000 UTC" firstStartedPulling="2025-12-05 18:13:25.520697311 +0000 UTC m=+4144.023804317" lastFinishedPulling="2025-12-05 18:13:27.905061893 +0000 UTC m=+4146.408168899" observedRunningTime="2025-12-05 18:13:28.565653255 +0000 UTC m=+4147.068760261" watchObservedRunningTime="2025-12-05 18:13:28.567088596 +0000 UTC m=+4147.070195602" Dec 05 18:13:29 crc kubenswrapper[4753]: I1205 18:13:29.720526 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469" Dec 05 18:13:30 crc kubenswrapper[4753]: I1205 18:13:30.573557 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"e42f1ec35840b32008ebef2a4fb1cb6bcc42f7dac831f627507ada0e7dee3d18"} Dec 05 18:13:32 crc kubenswrapper[4753]: I1205 18:13:32.319425 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/util/0.log" Dec 05 18:13:32 crc kubenswrapper[4753]: I1205 18:13:32.423608 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/util/0.log" Dec 05 18:13:32 crc kubenswrapper[4753]: I1205 18:13:32.442607 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/pull/0.log" Dec 05 18:13:32 crc kubenswrapper[4753]: I1205 18:13:32.446854 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/pull/0.log" Dec 05 18:13:32 crc kubenswrapper[4753]: I1205 18:13:32.755347 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/extract/0.log" Dec 05 18:13:32 crc kubenswrapper[4753]: I1205 18:13:32.761203 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/pull/0.log" Dec 05 18:13:32 crc kubenswrapper[4753]: I1205 18:13:32.794526 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/util/0.log" Dec 05 18:13:32 crc kubenswrapper[4753]: I1205 18:13:32.969108 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-k9647_304f4f1a-f42b-4904-9a77-9e26600eb591/kube-rbac-proxy/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.031071 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-k9647_304f4f1a-f42b-4904-9a77-9e26600eb591/manager/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.135350 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-pkzrf_6afd28dd-749e-409b-93ec-30cd85573a95/kube-rbac-proxy/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.256038 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-hhh6x_a5f4bc41-be86-43bd-b9af-d8d8cfac644e/kube-rbac-proxy/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.302292 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-pkzrf_6afd28dd-749e-409b-93ec-30cd85573a95/manager/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.362999 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-hhh6x_a5f4bc41-be86-43bd-b9af-d8d8cfac644e/manager/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.470457 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-g7c45_e485d825-a020-45b2-a642-bba12e1a5112/kube-rbac-proxy/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.625952 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-g7c45_e485d825-a020-45b2-a642-bba12e1a5112/manager/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.658342 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-n94z9_9472f12d-6c74-422c-8bc9-76a2ca161b77/kube-rbac-proxy/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.716490 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-n94z9_9472f12d-6c74-422c-8bc9-76a2ca161b77/manager/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.851506 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-9jsb5_9712e3f3-fe07-4f19-b04f-6736375fd440/manager/0.log" Dec 05 18:13:33 crc kubenswrapper[4753]: I1205 18:13:33.883232 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-9jsb5_9712e3f3-fe07-4f19-b04f-6736375fd440/kube-rbac-proxy/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.016195 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-j6jtq_e2bab632-3631-4dcf-b337-12982b375999/kube-rbac-proxy/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.094080 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.094120 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.147099 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.195026 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-kj4zh_2d850458-6add-4a44-b1c6-7dba1e8993ab/kube-rbac-proxy/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.274800 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-j6jtq_e2bab632-3631-4dcf-b337-12982b375999/manager/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.320682 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-kj4zh_2d850458-6add-4a44-b1c6-7dba1e8993ab/manager/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.444899 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-s6759_a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e/kube-rbac-proxy/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.545193 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-s6759_a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e/manager/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.619475 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-qpd7t_0d335585-bcd8-4ddf-a693-421d6d3bf6d2/kube-rbac-proxy/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.663603 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.719201 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b9z4v"] Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.730241 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-qpd7t_0d335585-bcd8-4ddf-a693-421d6d3bf6d2/manager/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.867867 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-s2rgf_85fd7687-c296-460a-a2b2-3da36c97efe6/kube-rbac-proxy/0.log" Dec 05 18:13:34 crc kubenswrapper[4753]: I1205 18:13:34.893962 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-s2rgf_85fd7687-c296-460a-a2b2-3da36c97efe6/manager/0.log" Dec 05 18:13:35 crc kubenswrapper[4753]: I1205 18:13:35.587968 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-vcc5x_09be61f0-7174-4035-a8f9-315ca512dea4/kube-rbac-proxy/0.log" Dec 05 18:13:35 crc kubenswrapper[4753]: I1205 18:13:35.770720 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-hf596_a4590a35-52c3-45a7-ba18-81d2db73c384/kube-rbac-proxy/0.log" Dec 05 18:13:35 crc kubenswrapper[4753]: I1205 18:13:35.793292 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-vcc5x_09be61f0-7174-4035-a8f9-315ca512dea4/manager/0.log" Dec 05 18:13:35 crc kubenswrapper[4753]: I1205 18:13:35.896232 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-hf596_a4590a35-52c3-45a7-ba18-81d2db73c384/manager/0.log" Dec 05 18:13:35 crc kubenswrapper[4753]: I1205 18:13:35.991514 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-72b7n_2f396259-4eaa-465d-9674-9999d750b1f6/manager/0.log" Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.002357 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-72b7n_2f396259-4eaa-465d-9674-9999d750b1f6/kube-rbac-proxy/0.log" Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.070081 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd462vzf_269f75ec-a232-47f3-8cc8-e9e4c8e9717d/kube-rbac-proxy/0.log" Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.166660 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd462vzf_269f75ec-a232-47f3-8cc8-e9e4c8e9717d/manager/0.log" Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.481435 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5958697dc4-d8ztf_a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff/operator/0.log" Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.503164 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-5blfl_46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f/registry-server/0.log" Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.584114 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-dhjvc_8e53ed9e-05c6-4a84-894d-85f427a53f72/kube-rbac-proxy/0.log" Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.638482 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-b9z4v" podUID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerName="registry-server" containerID="cri-o://31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4" gracePeriod=2 Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.805296 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-dhjvc_8e53ed9e-05c6-4a84-894d-85f427a53f72/manager/0.log" Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.919388 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-l5jmt_ebc7e3cf-3701-42f9-a6ca-43f11424a0b3/kube-rbac-proxy/0.log" Dec 05 18:13:36 crc kubenswrapper[4753]: I1205 18:13:36.946799 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-l5jmt_ebc7e3cf-3701-42f9-a6ca-43f11424a0b3/manager/0.log" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.282830 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.448764 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-utilities\") pod \"55317d0c-fc49-40d3-9d0d-02dbec190e32\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.448818 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-catalog-content\") pod \"55317d0c-fc49-40d3-9d0d-02dbec190e32\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.448919 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kvtzz\" (UniqueName: \"kubernetes.io/projected/55317d0c-fc49-40d3-9d0d-02dbec190e32-kube-api-access-kvtzz\") pod \"55317d0c-fc49-40d3-9d0d-02dbec190e32\" (UID: \"55317d0c-fc49-40d3-9d0d-02dbec190e32\") " Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.449964 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-utilities" (OuterVolumeSpecName: "utilities") pod "55317d0c-fc49-40d3-9d0d-02dbec190e32" (UID: "55317d0c-fc49-40d3-9d0d-02dbec190e32"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.450959 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-zl7c6_cca913d2-4dbb-4cd8-8575-4af52cc95501/operator/0.log" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.467462 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55317d0c-fc49-40d3-9d0d-02dbec190e32-kube-api-access-kvtzz" (OuterVolumeSpecName: "kube-api-access-kvtzz") pod "55317d0c-fc49-40d3-9d0d-02dbec190e32" (UID: "55317d0c-fc49-40d3-9d0d-02dbec190e32"). InnerVolumeSpecName "kube-api-access-kvtzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.533478 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6dd6c4f769-lq2pn_ac0841fe-3cba-4397-bfec-67a9cbec6861/manager/0.log" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.537611 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8fdbn_9e8cbdbf-2604-4e04-a56a-6f2175c09abe/kube-rbac-proxy/0.log" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.552214 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55317d0c-fc49-40d3-9d0d-02dbec190e32" (UID: "55317d0c-fc49-40d3-9d0d-02dbec190e32"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.553011 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.553043 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55317d0c-fc49-40d3-9d0d-02dbec190e32-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.553054 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kvtzz\" (UniqueName: \"kubernetes.io/projected/55317d0c-fc49-40d3-9d0d-02dbec190e32-kube-api-access-kvtzz\") on node \"crc\" DevicePath \"\"" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.651164 4753 generic.go:334] "Generic (PLEG): container finished" podID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerID="31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4" exitCode=0 Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.651302 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-b9z4v" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.651988 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9z4v" event={"ID":"55317d0c-fc49-40d3-9d0d-02dbec190e32","Type":"ContainerDied","Data":"31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4"} Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.652087 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-b9z4v" event={"ID":"55317d0c-fc49-40d3-9d0d-02dbec190e32","Type":"ContainerDied","Data":"a044b52bac689bc09f8a71b6d0d712239f418c4f0d96b1501fafe9591dfefa94"} Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.652198 4753 scope.go:117] "RemoveContainer" containerID="31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.675938 4753 scope.go:117] "RemoveContainer" containerID="22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.699781 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-b9z4v"] Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.704108 4753 scope.go:117] "RemoveContainer" containerID="2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.713396 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-b9z4v"] Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.736866 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55317d0c-fc49-40d3-9d0d-02dbec190e32" path="/var/lib/kubelet/pods/55317d0c-fc49-40d3-9d0d-02dbec190e32/volumes" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.745210 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8fdbn_9e8cbdbf-2604-4e04-a56a-6f2175c09abe/manager/0.log" Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.775792 4753 scope.go:117] "RemoveContainer" containerID="31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4" Dec 05 18:13:37 crc 
kubenswrapper[4753]: E1205 18:13:37.776489 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4\": container with ID starting with 31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4 not found: ID does not exist" containerID="31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4"
Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.776526 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4"} err="failed to get container status \"31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4\": rpc error: code = NotFound desc = could not find container \"31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4\": container with ID starting with 31bc383305caa6e827016205ad7282c3e22dfce687e9138c49ea74b268d033b4 not found: ID does not exist"
Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.776549 4753 scope.go:117] "RemoveContainer" containerID="22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199"
Dec 05 18:13:37 crc kubenswrapper[4753]: E1205 18:13:37.776833 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199\": container with ID starting with 22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199 not found: ID does not exist" containerID="22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199"
Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.776856 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199"} err="failed to get container status \"22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199\": rpc error: code = NotFound desc = could not find container \"22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199\": container with ID starting with 22361a3424e7f4649226d373ae079e6d3871f699a001b1ae39ce351965204199 not found: ID does not exist"
Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.776869 4753 scope.go:117] "RemoveContainer" containerID="2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74"
Dec 05 18:13:37 crc kubenswrapper[4753]: E1205 18:13:37.777219 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74\": container with ID starting with 2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74 not found: ID does not exist" containerID="2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74"
Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.777257 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74"} err="failed to get container status \"2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74\": rpc error: code = NotFound desc = could not find container \"2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74\": container with ID starting with 2d7e51d3cb862732887379d789027a33e55e345ddf4301dc94a8582bf8f92c74 not found: ID does not exist"
Dec 05 18:13:37 crc kubenswrapper[4753]: 
I1205 18:13:37.777575 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-75c997498-r5zvk_41186805-7b90-44a5-b6d6-fe4b6b4d9a79/kube-rbac-proxy/0.log"
Dec 05 18:13:37 crc kubenswrapper[4753]: I1205 18:13:37.940261 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-p5j4k_444bb95c-a503-40a1-a99e-64d04b3c8930/manager/0.log"
Dec 05 18:13:38 crc kubenswrapper[4753]: I1205 18:13:38.078243 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-p5j4k_444bb95c-a503-40a1-a99e-64d04b3c8930/kube-rbac-proxy/0.log"
Dec 05 18:13:38 crc kubenswrapper[4753]: I1205 18:13:38.207552 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-75c997498-r5zvk_41186805-7b90-44a5-b6d6-fe4b6b4d9a79/manager/0.log"
Dec 05 18:13:38 crc kubenswrapper[4753]: I1205 18:13:38.215242 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-7wzfq_7b847f69-4008-4acf-bddf-e0ea5a07b6bd/kube-rbac-proxy/0.log"
Dec 05 18:13:38 crc kubenswrapper[4753]: I1205 18:13:38.240256 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-7wzfq_7b847f69-4008-4acf-bddf-e0ea5a07b6bd/manager/0.log"
Dec 05 18:13:58 crc kubenswrapper[4753]: I1205 18:13:58.729224 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-pcb9b_27067136-5ba5-407b-a4a5-4d1e8c284564/control-plane-machine-set-operator/0.log"
Dec 05 18:13:58 crc kubenswrapper[4753]: I1205 18:13:58.749837 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vx9s2_5cd4f96b-673e-4518-a8ee-da3ccb7a86b0/kube-rbac-proxy/0.log"
Dec 05 18:13:58 crc kubenswrapper[4753]: I1205 18:13:58.883861 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vx9s2_5cd4f96b-673e-4518-a8ee-da3ccb7a86b0/machine-api-operator/0.log"
Dec 05 18:14:11 crc kubenswrapper[4753]: I1205 18:14:11.611194 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-mt28j_2406b66f-c017-41be-b22e-0a1b748b2fff/cert-manager-controller/0.log"
Dec 05 18:14:11 crc kubenswrapper[4753]: I1205 18:14:11.751532 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-nqxn4_07264007-14c8-49d7-b4f7-ee34bad54bca/cert-manager-cainjector/0.log"
Dec 05 18:14:11 crc kubenswrapper[4753]: I1205 18:14:11.792188 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-scp9f_62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6/cert-manager-webhook/0.log"
Dec 05 18:14:25 crc kubenswrapper[4753]: I1205 18:14:25.875857 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-hdwrw_eccebd56-3231-40e1-b2f8-3b02547ff479/nmstate-console-plugin/0.log"
Dec 05 18:14:26 crc kubenswrapper[4753]: I1205 18:14:26.036019 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-nq5l2_1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3/nmstate-handler/0.log"
Dec 05 18:14:26 crc kubenswrapper[4753]: I1205 18:14:26.076824 4753 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ql66l_3f053a7e-ea2d-4b0e-b0fb-928c0038c436/kube-rbac-proxy/0.log" Dec 05 18:14:26 crc kubenswrapper[4753]: I1205 18:14:26.093780 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ql66l_3f053a7e-ea2d-4b0e-b0fb-928c0038c436/nmstate-metrics/0.log" Dec 05 18:14:26 crc kubenswrapper[4753]: I1205 18:14:26.230536 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-5fxjp_37867715-d9ff-40d5-9c97-b99fd63be4b9/nmstate-operator/0.log" Dec 05 18:14:26 crc kubenswrapper[4753]: I1205 18:14:26.270828 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-mkz6b_dcfb16ac-e2ad-4b15-a3c7-d2c35e950739/nmstate-webhook/0.log" Dec 05 18:14:40 crc kubenswrapper[4753]: I1205 18:14:40.914171 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69686586d4-rttvr_2d74ebb5-4059-4fcd-beef-f9e7bd2731d4/manager/0.log" Dec 05 18:14:40 crc kubenswrapper[4753]: I1205 18:14:40.925819 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69686586d4-rttvr_2d74ebb5-4059-4fcd-beef-f9e7bd2731d4/kube-rbac-proxy/0.log" Dec 05 18:14:58 crc kubenswrapper[4753]: I1205 18:14:58.377721 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-q7lzx_23e6e527-684e-4e11-8470-b4149bb4c6cc/kube-rbac-proxy/0.log" Dec 05 18:14:58 crc kubenswrapper[4753]: I1205 18:14:58.481340 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-q7lzx_23e6e527-684e-4e11-8470-b4149bb4c6cc/controller/0.log" Dec 05 18:14:58 crc kubenswrapper[4753]: I1205 18:14:58.727985 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-frr-files/0.log" Dec 05 18:14:58 crc kubenswrapper[4753]: I1205 18:14:58.976997 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-frr-files/0.log" Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.056494 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-metrics/0.log" Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.069546 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-reloader/0.log" Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.098701 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-reloader/0.log" Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.311964 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-reloader/0.log" Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.346999 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-frr-files/0.log" Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.400658 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-metrics/0.log" 
Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.405229 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-metrics/0.log"
Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.548626 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-frr-files/0.log"
Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.582173 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/controller/0.log"
Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.585708 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-metrics/0.log"
Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.589938 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-reloader/0.log"
Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.769554 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/frr-metrics/0.log"
Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.867771 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/kube-rbac-proxy-frr/0.log"
Dec 05 18:14:59 crc kubenswrapper[4753]: I1205 18:14:59.880502 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/kube-rbac-proxy/0.log"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.007831 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/reloader/0.log"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.112817 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-hn9ht_42fa5f2a-00be-462f-b4f2-35e8b89e8a5e/frr-k8s-webhook-server/0.log"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.184470 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"]
Dec 05 18:15:00 crc kubenswrapper[4753]: E1205 18:15:00.184920 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerName="extract-content"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.184932 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerName="extract-content"
Dec 05 18:15:00 crc kubenswrapper[4753]: E1205 18:15:00.184949 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerName="registry-server"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.184955 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerName="registry-server"
Dec 05 18:15:00 crc kubenswrapper[4753]: E1205 18:15:00.184976 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerName="extract-utilities"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.184982 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerName="extract-utilities"
Dec 
05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.185175 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="55317d0c-fc49-40d3-9d0d-02dbec190e32" containerName="registry-server"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.186817 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.189680 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.190754 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.202323 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"]
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.249462 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b5cv\" (UniqueName: \"kubernetes.io/projected/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-kube-api-access-2b5cv\") pod \"collect-profiles-29415975-4tp6g\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.249554 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-config-volume\") pod \"collect-profiles-29415975-4tp6g\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.249632 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-secret-volume\") pod \"collect-profiles-29415975-4tp6g\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.334006 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-79f6547568-z26rz_44de2355-c97c-4421-87b6-1e7301bf430b/manager/0.log"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.351513 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b5cv\" (UniqueName: \"kubernetes.io/projected/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-kube-api-access-2b5cv\") pod \"collect-profiles-29415975-4tp6g\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.351664 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-config-volume\") pod \"collect-profiles-29415975-4tp6g\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"
Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.351796 4753 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-secret-volume\") pod \"collect-profiles-29415975-4tp6g\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.353588 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-config-volume\") pod \"collect-profiles-29415975-4tp6g\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.359943 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-secret-volume\") pod \"collect-profiles-29415975-4tp6g\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.393629 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b5cv\" (UniqueName: \"kubernetes.io/projected/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-kube-api-access-2b5cv\") pod \"collect-profiles-29415975-4tp6g\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.526605 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.745438 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7c4d66c645-ptcch_15c925fa-97bb-4d10-b85d-b451adac7306/webhook-server/0.log" Dec 05 18:15:00 crc kubenswrapper[4753]: I1205 18:15:00.931007 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-jls6g_08625193-514e-494a-b64a-75f345cf14bc/kube-rbac-proxy/0.log" Dec 05 18:15:01 crc kubenswrapper[4753]: I1205 18:15:01.114535 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"] Dec 05 18:15:01 crc kubenswrapper[4753]: I1205 18:15:01.456180 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/frr/0.log" Dec 05 18:15:01 crc kubenswrapper[4753]: I1205 18:15:01.511340 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-jls6g_08625193-514e-494a-b64a-75f345cf14bc/speaker/0.log" Dec 05 18:15:01 crc kubenswrapper[4753]: I1205 18:15:01.580292 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" event={"ID":"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a","Type":"ContainerStarted","Data":"895239dd27befd18cf27ba20cb71b4f73ef0ccb8cf1286266d1e59c0b21e82ec"} Dec 05 18:15:01 crc kubenswrapper[4753]: I1205 18:15:01.580377 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" event={"ID":"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a","Type":"ContainerStarted","Data":"b28e65a75138524965f117a4556940f9bc34755568e93915f0505c4c2ef00895"} Dec 05 
18:15:01 crc kubenswrapper[4753]: I1205 18:15:01.603444 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" podStartSLOduration=1.603426259 podStartE2EDuration="1.603426259s" podCreationTimestamp="2025-12-05 18:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 18:15:01.594515337 +0000 UTC m=+4240.097622333" watchObservedRunningTime="2025-12-05 18:15:01.603426259 +0000 UTC m=+4240.106533265"
Dec 05 18:15:02 crc kubenswrapper[4753]: I1205 18:15:02.591659 4753 generic.go:334] "Generic (PLEG): container finished" podID="361c3ec0-7e2b-48a2-be96-d9c26c5fa13a" containerID="895239dd27befd18cf27ba20cb71b4f73ef0ccb8cf1286266d1e59c0b21e82ec" exitCode=0
Dec 05 18:15:02 crc kubenswrapper[4753]: I1205 18:15:02.591877 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" event={"ID":"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a","Type":"ContainerDied","Data":"895239dd27befd18cf27ba20cb71b4f73ef0ccb8cf1286266d1e59c0b21e82ec"}
Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.175212 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g"
Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.309476 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-config-volume\") pod \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") "
Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.309600 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b5cv\" (UniqueName: \"kubernetes.io/projected/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-kube-api-access-2b5cv\") pod \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") "
Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.309633 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-secret-volume\") pod \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\" (UID: \"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a\") "
Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.310303 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-config-volume" (OuterVolumeSpecName: "config-volume") pod "361c3ec0-7e2b-48a2-be96-d9c26c5fa13a" (UID: "361c3ec0-7e2b-48a2-be96-d9c26c5fa13a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.315289 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "361c3ec0-7e2b-48a2-be96-d9c26c5fa13a" (UID: "361c3ec0-7e2b-48a2-be96-d9c26c5fa13a"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.320036 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-kube-api-access-2b5cv" (OuterVolumeSpecName: "kube-api-access-2b5cv") pod "361c3ec0-7e2b-48a2-be96-d9c26c5fa13a" (UID: "361c3ec0-7e2b-48a2-be96-d9c26c5fa13a"). InnerVolumeSpecName "kube-api-access-2b5cv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.412133 4753 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.412206 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b5cv\" (UniqueName: \"kubernetes.io/projected/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-kube-api-access-2b5cv\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.412224 4753 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/361c3ec0-7e2b-48a2-be96-d9c26c5fa13a-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.613437 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" event={"ID":"361c3ec0-7e2b-48a2-be96-d9c26c5fa13a","Type":"ContainerDied","Data":"b28e65a75138524965f117a4556940f9bc34755568e93915f0505c4c2ef00895"} Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.613501 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b28e65a75138524965f117a4556940f9bc34755568e93915f0505c4c2ef00895" Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.613506 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-4tp6g" Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.666432 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll"] Dec 05 18:15:04 crc kubenswrapper[4753]: I1205 18:15:04.675491 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-t9nll"] Dec 05 18:15:05 crc kubenswrapper[4753]: I1205 18:15:05.744341 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0eaa216f-034c-4be1-9041-706032f78479" path="/var/lib/kubelet/pods/0eaa216f-034c-4be1-9041-706032f78479/volumes" Dec 05 18:15:17 crc kubenswrapper[4753]: I1205 18:15:17.061062 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/util/0.log" Dec 05 18:15:17 crc kubenswrapper[4753]: I1205 18:15:17.276067 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/util/0.log" Dec 05 18:15:17 crc kubenswrapper[4753]: I1205 18:15:17.351847 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/pull/0.log" Dec 05 18:15:17 crc kubenswrapper[4753]: I1205 18:15:17.361898 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/pull/0.log" Dec 05 18:15:17 crc kubenswrapper[4753]: I1205 18:15:17.515417 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/util/0.log" Dec 05 18:15:17 crc kubenswrapper[4753]: I1205 18:15:17.548330 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/pull/0.log" Dec 05 18:15:17 crc kubenswrapper[4753]: I1205 18:15:17.552504 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/extract/0.log" Dec 05 18:15:17 crc kubenswrapper[4753]: I1205 18:15:17.754106 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/util/0.log" Dec 05 18:15:18 crc kubenswrapper[4753]: I1205 18:15:18.666056 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/pull/0.log" Dec 05 18:15:18 crc kubenswrapper[4753]: I1205 18:15:18.684978 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/util/0.log" Dec 05 18:15:18 crc kubenswrapper[4753]: I1205 18:15:18.686557 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/pull/0.log" Dec 05 18:15:18 crc kubenswrapper[4753]: I1205 18:15:18.889348 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/util/0.log" Dec 05 18:15:18 crc kubenswrapper[4753]: I1205 18:15:18.929484 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/extract/0.log" Dec 05 18:15:18 crc kubenswrapper[4753]: I1205 18:15:18.949191 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/pull/0.log" Dec 05 18:15:19 crc kubenswrapper[4753]: I1205 18:15:19.094553 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-utilities/0.log" Dec 05 18:15:19 crc kubenswrapper[4753]: I1205 18:15:19.296890 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-utilities/0.log" Dec 05 18:15:19 crc kubenswrapper[4753]: I1205 18:15:19.310610 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-content/0.log" Dec 05 18:15:19 crc kubenswrapper[4753]: I1205 18:15:19.311184 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-content/0.log" Dec 05 18:15:19 crc kubenswrapper[4753]: I1205 18:15:19.510660 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-utilities/0.log" Dec 05 18:15:19 crc kubenswrapper[4753]: I1205 18:15:19.661201 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-content/0.log" Dec 05 18:15:20 crc kubenswrapper[4753]: I1205 18:15:20.120584 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/registry-server/0.log" Dec 05 18:15:20 crc kubenswrapper[4753]: I1205 18:15:20.129275 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-utilities/0.log" Dec 05 18:15:20 crc kubenswrapper[4753]: I1205 18:15:20.341563 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-utilities/0.log" Dec 05 18:15:20 crc kubenswrapper[4753]: I1205 18:15:20.356758 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-content/0.log" Dec 05 18:15:20 crc kubenswrapper[4753]: I1205 18:15:20.383631 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-content/0.log" Dec 05 18:15:20 
crc kubenswrapper[4753]: I1205 18:15:20.606565 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-content/0.log"
Dec 05 18:15:20 crc kubenswrapper[4753]: I1205 18:15:20.610431 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-utilities/0.log"
Dec 05 18:15:20 crc kubenswrapper[4753]: I1205 18:15:20.652493 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kr92j_303b3266-3775-4fcb-aac9-432b1fefaedc/marketplace-operator/0.log"
Dec 05 18:15:20 crc kubenswrapper[4753]: I1205 18:15:20.846134 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-utilities/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.108938 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-utilities/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.132137 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-content/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.165526 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-content/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.188299 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/registry-server/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.346314 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-utilities/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.501317 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-content/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.503225 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/registry-server/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.550845 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-utilities/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.648943 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-utilities/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.650601 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-content/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.693344 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-content/0.log"
Dec 05 18:15:21 crc 
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.876024 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-content/0.log"
Dec 05 18:15:21 crc kubenswrapper[4753]: I1205 18:15:21.944825 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-utilities/0.log"
Dec 05 18:15:22 crc kubenswrapper[4753]: I1205 18:15:22.300021 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/registry-server/0.log"
Dec 05 18:15:33 crc kubenswrapper[4753]: I1205 18:15:33.409413 4753 scope.go:117] "RemoveContainer" containerID="05debfed71e08e74e588ce61c0f8cfb802bc9e0de5d319bf1874d396028785f9"
Dec 05 18:15:37 crc kubenswrapper[4753]: I1205 18:15:37.857080 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-f4kz7_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182/prometheus-operator/0.log"
Dec 05 18:15:38 crc kubenswrapper[4753]: I1205 18:15:38.105357 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-57647d658-ds52w_96a23d8c-f2af-4e5d-afa5-4734f81f73ef/prometheus-operator-admission-webhook/0.log"
Dec 05 18:15:38 crc kubenswrapper[4753]: I1205 18:15:38.159521 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf/prometheus-operator-admission-webhook/0.log"
Dec 05 18:15:38 crc kubenswrapper[4753]: I1205 18:15:38.308226 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-x8ww2_6b271246-3310-483c-a548-db788331725d/operator/0.log"
Dec 05 18:15:38 crc kubenswrapper[4753]: I1205 18:15:38.810765 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-66rht_a1cc3eef-b65a-496f-9cf1-7567825fce78/perses-operator/0.log"
Dec 05 18:15:55 crc kubenswrapper[4753]: I1205 18:15:55.151916 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69686586d4-rttvr_2d74ebb5-4059-4fcd-beef-f9e7bd2731d4/kube-rbac-proxy/0.log"
Dec 05 18:15:55 crc kubenswrapper[4753]: I1205 18:15:55.201250 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69686586d4-rttvr_2d74ebb5-4059-4fcd-beef-f9e7bd2731d4/manager/0.log"
Dec 05 18:15:58 crc kubenswrapper[4753]: I1205 18:15:58.978656 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:15:58 crc kubenswrapper[4753]: I1205 18:15:58.979329 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:16:28 crc kubenswrapper[4753]: I1205 18:16:28.763720 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/openstack-galera-0" podUID="9df69769-e394-444f-b6e2-e788e989fe92" containerName="galera" probeResult="failure" output="command timed out"
Dec 05 18:16:28 crc kubenswrapper[4753]: I1205 18:16:28.764364 4753 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-galera-0" podUID="9df69769-e394-444f-b6e2-e788e989fe92" containerName="galera" probeResult="failure" output="command timed out"
Dec 05 18:16:28 crc kubenswrapper[4753]: I1205 18:16:28.979052 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:16:28 crc kubenswrapper[4753]: I1205 18:16:28.979138 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:16:58 crc kubenswrapper[4753]: I1205 18:16:58.979687 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:16:58 crc kubenswrapper[4753]: I1205 18:16:58.980338 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:16:58 crc kubenswrapper[4753]: I1205 18:16:58.980404 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68"
Dec 05 18:16:58 crc kubenswrapper[4753]: I1205 18:16:58.981341 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e42f1ec35840b32008ebef2a4fb1cb6bcc42f7dac831f627507ada0e7dee3d18"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 18:16:58 crc kubenswrapper[4753]: I1205 18:16:58.981429 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://e42f1ec35840b32008ebef2a4fb1cb6bcc42f7dac831f627507ada0e7dee3d18" gracePeriod=600
Dec 05 18:16:59 crc kubenswrapper[4753]: I1205 18:16:59.860027 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="e42f1ec35840b32008ebef2a4fb1cb6bcc42f7dac831f627507ada0e7dee3d18" exitCode=0
Dec 05 18:16:59 crc kubenswrapper[4753]: I1205 18:16:59.860097 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"e42f1ec35840b32008ebef2a4fb1cb6bcc42f7dac831f627507ada0e7dee3d18"}
Dec 05 18:16:59 crc kubenswrapper[4753]: I1205 18:16:59.860608 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"}
Dec 05 18:16:59 crc kubenswrapper[4753]: I1205 18:16:59.860627 4753 scope.go:117] "RemoveContainer" containerID="27e4b05ebb2bfc11b01eef3ef253deba1eb448fba7ef2f01139f1ff275bdb469"
Dec 05 18:17:33 crc kubenswrapper[4753]: I1205 18:17:33.485239 4753 scope.go:117] "RemoveContainer" containerID="37ad614dcc6ddce3f70e1e9d25e43875b8e2fc7186c207ad1a5979de4b074c45"
Dec 05 18:17:37 crc kubenswrapper[4753]: I1205 18:17:37.371102 4753 generic.go:334] "Generic (PLEG): container finished" podID="f551b736-2665-40a5-a843-dc960cfe8f06" containerID="c1d23686d38f373dac59c902de472beb8e500c9178572b2dab94c26f575c05af" exitCode=0
Dec 05 18:17:37 crc kubenswrapper[4753]: I1205 18:17:37.371287 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-whd89/must-gather-7jh4t" event={"ID":"f551b736-2665-40a5-a843-dc960cfe8f06","Type":"ContainerDied","Data":"c1d23686d38f373dac59c902de472beb8e500c9178572b2dab94c26f575c05af"}
Dec 05 18:17:37 crc kubenswrapper[4753]: I1205 18:17:37.372435 4753 scope.go:117] "RemoveContainer" containerID="c1d23686d38f373dac59c902de472beb8e500c9178572b2dab94c26f575c05af"
Dec 05 18:17:37 crc kubenswrapper[4753]: I1205 18:17:37.475592 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-whd89_must-gather-7jh4t_f551b736-2665-40a5-a843-dc960cfe8f06/gather/0.log"
Dec 05 18:17:45 crc kubenswrapper[4753]: I1205 18:17:45.290062 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-whd89/must-gather-7jh4t"]
Dec 05 18:17:45 crc kubenswrapper[4753]: I1205 18:17:45.290901 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-whd89/must-gather-7jh4t" podUID="f551b736-2665-40a5-a843-dc960cfe8f06" containerName="copy" containerID="cri-o://44e054548b9b769d99c46b93aecd39a16da343ea6e381ba0e75a8e458b46fbb4" gracePeriod=2
Dec 05 18:17:45 crc kubenswrapper[4753]: I1205 18:17:45.305208 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-whd89/must-gather-7jh4t"]
Dec 05 18:17:45 crc kubenswrapper[4753]: I1205 18:17:45.460571 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-whd89_must-gather-7jh4t_f551b736-2665-40a5-a843-dc960cfe8f06/copy/0.log"
Dec 05 18:17:45 crc kubenswrapper[4753]: I1205 18:17:45.461668 4753 generic.go:334] "Generic (PLEG): container finished" podID="f551b736-2665-40a5-a843-dc960cfe8f06" containerID="44e054548b9b769d99c46b93aecd39a16da343ea6e381ba0e75a8e458b46fbb4" exitCode=143
Dec 05 18:17:45 crc kubenswrapper[4753]: I1205 18:17:45.936706 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-whd89_must-gather-7jh4t_f551b736-2665-40a5-a843-dc960cfe8f06/copy/0.log"
Dec 05 18:17:45 crc kubenswrapper[4753]: I1205 18:17:45.937299 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/must-gather-7jh4t"
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.130865 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f551b736-2665-40a5-a843-dc960cfe8f06-must-gather-output\") pod \"f551b736-2665-40a5-a843-dc960cfe8f06\" (UID: \"f551b736-2665-40a5-a843-dc960cfe8f06\") "
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.131333 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sv5hr\" (UniqueName: \"kubernetes.io/projected/f551b736-2665-40a5-a843-dc960cfe8f06-kube-api-access-sv5hr\") pod \"f551b736-2665-40a5-a843-dc960cfe8f06\" (UID: \"f551b736-2665-40a5-a843-dc960cfe8f06\") "
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.141447 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f551b736-2665-40a5-a843-dc960cfe8f06-kube-api-access-sv5hr" (OuterVolumeSpecName: "kube-api-access-sv5hr") pod "f551b736-2665-40a5-a843-dc960cfe8f06" (UID: "f551b736-2665-40a5-a843-dc960cfe8f06"). InnerVolumeSpecName "kube-api-access-sv5hr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.234828 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sv5hr\" (UniqueName: \"kubernetes.io/projected/f551b736-2665-40a5-a843-dc960cfe8f06-kube-api-access-sv5hr\") on node \"crc\" DevicePath \"\""
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.331598 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f551b736-2665-40a5-a843-dc960cfe8f06-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "f551b736-2665-40a5-a843-dc960cfe8f06" (UID: "f551b736-2665-40a5-a843-dc960cfe8f06"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.338180 4753 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f551b736-2665-40a5-a843-dc960cfe8f06-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.476361 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-whd89_must-gather-7jh4t_f551b736-2665-40a5-a843-dc960cfe8f06/copy/0.log"
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.476922 4753 scope.go:117] "RemoveContainer" containerID="44e054548b9b769d99c46b93aecd39a16da343ea6e381ba0e75a8e458b46fbb4"
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.477046 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-whd89/must-gather-7jh4t"
Dec 05 18:17:46 crc kubenswrapper[4753]: I1205 18:17:46.520919 4753 scope.go:117] "RemoveContainer" containerID="c1d23686d38f373dac59c902de472beb8e500c9178572b2dab94c26f575c05af"
Dec 05 18:17:47 crc kubenswrapper[4753]: I1205 18:17:47.730852 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f551b736-2665-40a5-a843-dc960cfe8f06" path="/var/lib/kubelet/pods/f551b736-2665-40a5-a843-dc960cfe8f06/volumes"
Dec 05 18:18:33 crc kubenswrapper[4753]: I1205 18:18:33.614660 4753 scope.go:117] "RemoveContainer" containerID="a66acf4eb77bd363c8986af12325e679bcd0b27524e2fe39eba9d460beefba4c"
Dec 05 18:19:28 crc kubenswrapper[4753]: I1205 18:19:28.979619 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:19:28 crc kubenswrapper[4753]: I1205 18:19:28.980222 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:19:58 crc kubenswrapper[4753]: I1205 18:19:58.978689 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:19:58 crc kubenswrapper[4753]: I1205 18:19:58.979359 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:20:28 crc kubenswrapper[4753]: I1205 18:20:28.979455 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:20:28 crc kubenswrapper[4753]: I1205 18:20:28.980481 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:20:28 crc kubenswrapper[4753]: I1205 18:20:28.980582 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68"
Dec 05 18:20:28 crc kubenswrapper[4753]: I1205 18:20:28.981718 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 18:20:28 crc kubenswrapper[4753]: I1205 18:20:28.981828 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" gracePeriod=600
Dec 05 18:20:29 crc kubenswrapper[4753]: E1205 18:20:29.128600 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:20:29 crc kubenswrapper[4753]: I1205 18:20:29.576235 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" exitCode=0
Dec 05 18:20:29 crc kubenswrapper[4753]: I1205 18:20:29.576333 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"}
Dec 05 18:20:29 crc kubenswrapper[4753]: I1205 18:20:29.576424 4753 scope.go:117] "RemoveContainer" containerID="e42f1ec35840b32008ebef2a4fb1cb6bcc42f7dac831f627507ada0e7dee3d18"
Dec 05 18:20:29 crc kubenswrapper[4753]: I1205 18:20:29.576821 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"
Dec 05 18:20:29 crc kubenswrapper[4753]: E1205 18:20:29.578285 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:20:40 crc kubenswrapper[4753]: I1205 18:20:40.720985 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"
Dec 05 18:20:40 crc kubenswrapper[4753]: E1205 18:20:40.721892 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.902846 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-grnvs/must-gather-8f7zr"]
Dec 05 18:20:51 crc kubenswrapper[4753]: E1205 18:20:51.903852 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f551b736-2665-40a5-a843-dc960cfe8f06" containerName="gather"
containerName="gather" Dec 05 18:20:51 crc kubenswrapper[4753]: E1205 18:20:51.903915 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f551b736-2665-40a5-a843-dc960cfe8f06" containerName="copy" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.903928 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f551b736-2665-40a5-a843-dc960cfe8f06" containerName="copy" Dec 05 18:20:51 crc kubenswrapper[4753]: E1205 18:20:51.903945 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="361c3ec0-7e2b-48a2-be96-d9c26c5fa13a" containerName="collect-profiles" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.903953 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="361c3ec0-7e2b-48a2-be96-d9c26c5fa13a" containerName="collect-profiles" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.904232 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="361c3ec0-7e2b-48a2-be96-d9c26c5fa13a" containerName="collect-profiles" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.904255 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f551b736-2665-40a5-a843-dc960cfe8f06" containerName="gather" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.904268 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f551b736-2665-40a5-a843-dc960cfe8f06" containerName="copy" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.911697 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.919594 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-grnvs"/"openshift-service-ca.crt" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.921771 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-grnvs"/"kube-root-ca.crt" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.940408 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-grnvs/must-gather-8f7zr"] Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.970597 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d8ff9c27-b92d-4c30-ba62-164ea425f02f-must-gather-output\") pod \"must-gather-8f7zr\" (UID: \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\") " pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:20:51 crc kubenswrapper[4753]: I1205 18:20:51.970671 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bcdt4\" (UniqueName: \"kubernetes.io/projected/d8ff9c27-b92d-4c30-ba62-164ea425f02f-kube-api-access-bcdt4\") pod \"must-gather-8f7zr\" (UID: \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\") " pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:20:52 crc kubenswrapper[4753]: I1205 18:20:52.072024 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bcdt4\" (UniqueName: \"kubernetes.io/projected/d8ff9c27-b92d-4c30-ba62-164ea425f02f-kube-api-access-bcdt4\") pod \"must-gather-8f7zr\" (UID: \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\") " pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:20:52 crc kubenswrapper[4753]: I1205 18:20:52.072474 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: 
\"kubernetes.io/empty-dir/d8ff9c27-b92d-4c30-ba62-164ea425f02f-must-gather-output\") pod \"must-gather-8f7zr\" (UID: \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\") " pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:20:52 crc kubenswrapper[4753]: I1205 18:20:52.072968 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d8ff9c27-b92d-4c30-ba62-164ea425f02f-must-gather-output\") pod \"must-gather-8f7zr\" (UID: \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\") " pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:20:52 crc kubenswrapper[4753]: I1205 18:20:52.108233 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bcdt4\" (UniqueName: \"kubernetes.io/projected/d8ff9c27-b92d-4c30-ba62-164ea425f02f-kube-api-access-bcdt4\") pod \"must-gather-8f7zr\" (UID: \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\") " pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:20:52 crc kubenswrapper[4753]: I1205 18:20:52.234202 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:20:52 crc kubenswrapper[4753]: I1205 18:20:52.721669 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:20:52 crc kubenswrapper[4753]: E1205 18:20:52.722293 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:20:52 crc kubenswrapper[4753]: I1205 18:20:52.801109 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-grnvs/must-gather-8f7zr"] Dec 05 18:20:53 crc kubenswrapper[4753]: I1205 18:20:53.875776 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/must-gather-8f7zr" event={"ID":"d8ff9c27-b92d-4c30-ba62-164ea425f02f","Type":"ContainerStarted","Data":"82e0327c3ecd7f3ab00ca469c1b49259727e3f729b79edd39e34080c8fe3333a"} Dec 05 18:20:53 crc kubenswrapper[4753]: I1205 18:20:53.876141 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/must-gather-8f7zr" event={"ID":"d8ff9c27-b92d-4c30-ba62-164ea425f02f","Type":"ContainerStarted","Data":"533987c37c07456c2aa4069d5d86bb217315589689154061f52195f8a8027f9b"} Dec 05 18:20:53 crc kubenswrapper[4753]: I1205 18:20:53.876170 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/must-gather-8f7zr" event={"ID":"d8ff9c27-b92d-4c30-ba62-164ea425f02f","Type":"ContainerStarted","Data":"533c83d87818d0968441a974adbaca5da50240cefbce57c402d76be7c3c764b3"} Dec 05 18:20:53 crc kubenswrapper[4753]: I1205 18:20:53.894508 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-grnvs/must-gather-8f7zr" podStartSLOduration=2.894490564 podStartE2EDuration="2.894490564s" podCreationTimestamp="2025-12-05 18:20:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 18:20:53.891885391 +0000 UTC m=+4592.394992397" watchObservedRunningTime="2025-12-05 18:20:53.894490564 +0000 UTC 
m=+4592.397597570" Dec 05 18:20:57 crc kubenswrapper[4753]: I1205 18:20:57.905716 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-grnvs/crc-debug-6bwgj"] Dec 05 18:20:57 crc kubenswrapper[4753]: I1205 18:20:57.907607 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:20:57 crc kubenswrapper[4753]: I1205 18:20:57.909530 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-grnvs"/"default-dockercfg-qccnq" Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.016708 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d87b4018-6394-41bc-a694-4176f5eb45d5-host\") pod \"crc-debug-6bwgj\" (UID: \"d87b4018-6394-41bc-a694-4176f5eb45d5\") " pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.017082 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhmg2\" (UniqueName: \"kubernetes.io/projected/d87b4018-6394-41bc-a694-4176f5eb45d5-kube-api-access-hhmg2\") pod \"crc-debug-6bwgj\" (UID: \"d87b4018-6394-41bc-a694-4176f5eb45d5\") " pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.119721 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d87b4018-6394-41bc-a694-4176f5eb45d5-host\") pod \"crc-debug-6bwgj\" (UID: \"d87b4018-6394-41bc-a694-4176f5eb45d5\") " pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.119820 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhmg2\" (UniqueName: \"kubernetes.io/projected/d87b4018-6394-41bc-a694-4176f5eb45d5-kube-api-access-hhmg2\") pod \"crc-debug-6bwgj\" (UID: \"d87b4018-6394-41bc-a694-4176f5eb45d5\") " pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.119875 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d87b4018-6394-41bc-a694-4176f5eb45d5-host\") pod \"crc-debug-6bwgj\" (UID: \"d87b4018-6394-41bc-a694-4176f5eb45d5\") " pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.136984 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhmg2\" (UniqueName: \"kubernetes.io/projected/d87b4018-6394-41bc-a694-4176f5eb45d5-kube-api-access-hhmg2\") pod \"crc-debug-6bwgj\" (UID: \"d87b4018-6394-41bc-a694-4176f5eb45d5\") " pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.224075 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:20:58 crc kubenswrapper[4753]: W1205 18:20:58.254126 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd87b4018_6394_41bc_a694_4176f5eb45d5.slice/crio-6f52161ea6fbe64a0999305f8073f41795a93aec71e4cf20d7940cf0ee7ddf4a WatchSource:0}: Error finding container 6f52161ea6fbe64a0999305f8073f41795a93aec71e4cf20d7940cf0ee7ddf4a: Status 404 returned error can't find the container with id 6f52161ea6fbe64a0999305f8073f41795a93aec71e4cf20d7940cf0ee7ddf4a Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.920547 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/crc-debug-6bwgj" event={"ID":"d87b4018-6394-41bc-a694-4176f5eb45d5","Type":"ContainerStarted","Data":"6c0045965cc4f076fbcfa7ddfe60111930c88ffe7d7d6b0d4f96b43bc99a67a2"} Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.921003 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/crc-debug-6bwgj" event={"ID":"d87b4018-6394-41bc-a694-4176f5eb45d5","Type":"ContainerStarted","Data":"6f52161ea6fbe64a0999305f8073f41795a93aec71e4cf20d7940cf0ee7ddf4a"} Dec 05 18:20:58 crc kubenswrapper[4753]: I1205 18:20:58.934471 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-grnvs/crc-debug-6bwgj" podStartSLOduration=1.9344526 podStartE2EDuration="1.9344526s" podCreationTimestamp="2025-12-05 18:20:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 18:20:58.931098675 +0000 UTC m=+4597.434205681" watchObservedRunningTime="2025-12-05 18:20:58.9344526 +0000 UTC m=+4597.437559626" Dec 05 18:21:05 crc kubenswrapper[4753]: I1205 18:21:05.721560 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:21:05 crc kubenswrapper[4753]: E1205 18:21:05.722925 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.608877 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gfmbn"] Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.614586 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.659794 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gfmbn"] Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.717381 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crfxp\" (UniqueName: \"kubernetes.io/projected/91be9ce3-8a31-4289-9bb4-1645b59233ca-kube-api-access-crfxp\") pod \"redhat-marketplace-gfmbn\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.717533 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-utilities\") pod \"redhat-marketplace-gfmbn\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.718695 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-catalog-content\") pod \"redhat-marketplace-gfmbn\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.824135 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-catalog-content\") pod \"redhat-marketplace-gfmbn\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.824296 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crfxp\" (UniqueName: \"kubernetes.io/projected/91be9ce3-8a31-4289-9bb4-1645b59233ca-kube-api-access-crfxp\") pod \"redhat-marketplace-gfmbn\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.824394 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-utilities\") pod \"redhat-marketplace-gfmbn\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.825240 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-utilities\") pod \"redhat-marketplace-gfmbn\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.825781 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-catalog-content\") pod \"redhat-marketplace-gfmbn\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.857126 4753 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-crfxp\" (UniqueName: \"kubernetes.io/projected/91be9ce3-8a31-4289-9bb4-1645b59233ca-kube-api-access-crfxp\") pod \"redhat-marketplace-gfmbn\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:15 crc kubenswrapper[4753]: I1205 18:21:15.946405 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:16 crc kubenswrapper[4753]: I1205 18:21:16.550547 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gfmbn"] Dec 05 18:21:17 crc kubenswrapper[4753]: I1205 18:21:17.149935 4753 generic.go:334] "Generic (PLEG): container finished" podID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerID="4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33" exitCode=0 Dec 05 18:21:17 crc kubenswrapper[4753]: I1205 18:21:17.149987 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"91be9ce3-8a31-4289-9bb4-1645b59233ca","Type":"ContainerDied","Data":"4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33"} Dec 05 18:21:17 crc kubenswrapper[4753]: I1205 18:21:17.150261 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"91be9ce3-8a31-4289-9bb4-1645b59233ca","Type":"ContainerStarted","Data":"45e996d7bb2a95c34921b367d2a3c4bebaf6be3a899785f30435311009c36534"} Dec 05 18:21:17 crc kubenswrapper[4753]: I1205 18:21:17.151974 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 18:21:17 crc kubenswrapper[4753]: I1205 18:21:17.721302 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:21:17 crc kubenswrapper[4753]: E1205 18:21:17.721585 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:21:18 crc kubenswrapper[4753]: I1205 18:21:18.161192 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"91be9ce3-8a31-4289-9bb4-1645b59233ca","Type":"ContainerStarted","Data":"eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536"} Dec 05 18:21:19 crc kubenswrapper[4753]: I1205 18:21:19.172625 4753 generic.go:334] "Generic (PLEG): container finished" podID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerID="eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536" exitCode=0 Dec 05 18:21:19 crc kubenswrapper[4753]: I1205 18:21:19.172903 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"91be9ce3-8a31-4289-9bb4-1645b59233ca","Type":"ContainerDied","Data":"eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536"} Dec 05 18:21:20 crc kubenswrapper[4753]: I1205 18:21:20.187145 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" 
event={"ID":"91be9ce3-8a31-4289-9bb4-1645b59233ca","Type":"ContainerStarted","Data":"4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae"} Dec 05 18:21:20 crc kubenswrapper[4753]: I1205 18:21:20.209077 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gfmbn" podStartSLOduration=2.788373344 podStartE2EDuration="5.209061256s" podCreationTimestamp="2025-12-05 18:21:15 +0000 UTC" firstStartedPulling="2025-12-05 18:21:17.151607237 +0000 UTC m=+4615.654714233" lastFinishedPulling="2025-12-05 18:21:19.572295139 +0000 UTC m=+4618.075402145" observedRunningTime="2025-12-05 18:21:20.203070926 +0000 UTC m=+4618.706177932" watchObservedRunningTime="2025-12-05 18:21:20.209061256 +0000 UTC m=+4618.712168262" Dec 05 18:21:25 crc kubenswrapper[4753]: I1205 18:21:25.947496 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:25 crc kubenswrapper[4753]: I1205 18:21:25.948036 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:26 crc kubenswrapper[4753]: I1205 18:21:26.001124 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:26 crc kubenswrapper[4753]: I1205 18:21:26.297088 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:26 crc kubenswrapper[4753]: I1205 18:21:26.351455 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gfmbn"] Dec 05 18:21:28 crc kubenswrapper[4753]: I1205 18:21:28.261299 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gfmbn" podUID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerName="registry-server" containerID="cri-o://4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae" gracePeriod=2 Dec 05 18:21:28 crc kubenswrapper[4753]: I1205 18:21:28.919874 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:28 crc kubenswrapper[4753]: I1205 18:21:28.940920 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-catalog-content\") pod \"91be9ce3-8a31-4289-9bb4-1645b59233ca\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " Dec 05 18:21:28 crc kubenswrapper[4753]: I1205 18:21:28.940997 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crfxp\" (UniqueName: \"kubernetes.io/projected/91be9ce3-8a31-4289-9bb4-1645b59233ca-kube-api-access-crfxp\") pod \"91be9ce3-8a31-4289-9bb4-1645b59233ca\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " Dec 05 18:21:28 crc kubenswrapper[4753]: I1205 18:21:28.941059 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-utilities\") pod \"91be9ce3-8a31-4289-9bb4-1645b59233ca\" (UID: \"91be9ce3-8a31-4289-9bb4-1645b59233ca\") " Dec 05 18:21:28 crc kubenswrapper[4753]: I1205 18:21:28.942142 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-utilities" (OuterVolumeSpecName: "utilities") pod "91be9ce3-8a31-4289-9bb4-1645b59233ca" (UID: "91be9ce3-8a31-4289-9bb4-1645b59233ca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:21:28 crc kubenswrapper[4753]: I1205 18:21:28.971755 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91be9ce3-8a31-4289-9bb4-1645b59233ca-kube-api-access-crfxp" (OuterVolumeSpecName: "kube-api-access-crfxp") pod "91be9ce3-8a31-4289-9bb4-1645b59233ca" (UID: "91be9ce3-8a31-4289-9bb4-1645b59233ca"). InnerVolumeSpecName "kube-api-access-crfxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:21:28 crc kubenswrapper[4753]: I1205 18:21:28.974035 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "91be9ce3-8a31-4289-9bb4-1645b59233ca" (UID: "91be9ce3-8a31-4289-9bb4-1645b59233ca"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.042525 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.042554 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crfxp\" (UniqueName: \"kubernetes.io/projected/91be9ce3-8a31-4289-9bb4-1645b59233ca-kube-api-access-crfxp\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.042564 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/91be9ce3-8a31-4289-9bb4-1645b59233ca-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.272391 4753 generic.go:334] "Generic (PLEG): container finished" podID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerID="4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae" exitCode=0 Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.272440 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gfmbn" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.272447 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"91be9ce3-8a31-4289-9bb4-1645b59233ca","Type":"ContainerDied","Data":"4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae"} Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.272498 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"91be9ce3-8a31-4289-9bb4-1645b59233ca","Type":"ContainerDied","Data":"45e996d7bb2a95c34921b367d2a3c4bebaf6be3a899785f30435311009c36534"} Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.272529 4753 scope.go:117] "RemoveContainer" containerID="4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.289977 4753 scope.go:117] "RemoveContainer" containerID="eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.328693 4753 scope.go:117] "RemoveContainer" containerID="4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.330072 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gfmbn"] Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.344849 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gfmbn"] Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.366611 4753 scope.go:117] "RemoveContainer" containerID="4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae" Dec 05 18:21:29 crc kubenswrapper[4753]: E1205 18:21:29.367012 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae\": container with ID starting with 4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae not found: ID does not exist" containerID="4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.367042 4753 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae"} err="failed to get container status \"4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae\": rpc error: code = NotFound desc = could not find container \"4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae\": container with ID starting with 4ddab95ba4bf8708c63f4e9664730d2233a90560580ba7de80d7785ba7c6f0ae not found: ID does not exist" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.367066 4753 scope.go:117] "RemoveContainer" containerID="eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536" Dec 05 18:21:29 crc kubenswrapper[4753]: E1205 18:21:29.367419 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536\": container with ID starting with eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536 not found: ID does not exist" containerID="eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.367442 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536"} err="failed to get container status \"eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536\": rpc error: code = NotFound desc = could not find container \"eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536\": container with ID starting with eb4b2f31209c9e57b18a738fe6c441c4e376945ab5999c8f98304330bd9c5536 not found: ID does not exist" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.367454 4753 scope.go:117] "RemoveContainer" containerID="4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33" Dec 05 18:21:29 crc kubenswrapper[4753]: E1205 18:21:29.367660 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33\": container with ID starting with 4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33 not found: ID does not exist" containerID="4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.367678 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33"} err="failed to get container status \"4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33\": rpc error: code = NotFound desc = could not find container \"4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33\": container with ID starting with 4f1ab3de6e6742da74f61582b583dc5ec9973ca61c3f1ef82f6299acc854ec33 not found: ID does not exist" Dec 05 18:21:29 crc kubenswrapper[4753]: I1205 18:21:29.741097 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91be9ce3-8a31-4289-9bb4-1645b59233ca" path="/var/lib/kubelet/pods/91be9ce3-8a31-4289-9bb4-1645b59233ca/volumes" Dec 05 18:21:30 crc kubenswrapper[4753]: I1205 18:21:30.721211 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:21:30 crc kubenswrapper[4753]: E1205 18:21:30.721773 4753 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:21:32 crc kubenswrapper[4753]: I1205 18:21:32.301853 4753 generic.go:334] "Generic (PLEG): container finished" podID="d87b4018-6394-41bc-a694-4176f5eb45d5" containerID="6c0045965cc4f076fbcfa7ddfe60111930c88ffe7d7d6b0d4f96b43bc99a67a2" exitCode=0 Dec 05 18:21:32 crc kubenswrapper[4753]: I1205 18:21:32.301939 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/crc-debug-6bwgj" event={"ID":"d87b4018-6394-41bc-a694-4176f5eb45d5","Type":"ContainerDied","Data":"6c0045965cc4f076fbcfa7ddfe60111930c88ffe7d7d6b0d4f96b43bc99a67a2"} Dec 05 18:21:33 crc kubenswrapper[4753]: I1205 18:21:33.692324 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:21:33 crc kubenswrapper[4753]: I1205 18:21:33.733960 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-grnvs/crc-debug-6bwgj"] Dec 05 18:21:33 crc kubenswrapper[4753]: I1205 18:21:33.736983 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-grnvs/crc-debug-6bwgj"] Dec 05 18:21:33 crc kubenswrapper[4753]: I1205 18:21:33.836315 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d87b4018-6394-41bc-a694-4176f5eb45d5-host" (OuterVolumeSpecName: "host") pod "d87b4018-6394-41bc-a694-4176f5eb45d5" (UID: "d87b4018-6394-41bc-a694-4176f5eb45d5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 18:21:33 crc kubenswrapper[4753]: I1205 18:21:33.836270 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d87b4018-6394-41bc-a694-4176f5eb45d5-host\") pod \"d87b4018-6394-41bc-a694-4176f5eb45d5\" (UID: \"d87b4018-6394-41bc-a694-4176f5eb45d5\") " Dec 05 18:21:33 crc kubenswrapper[4753]: I1205 18:21:33.839523 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhmg2\" (UniqueName: \"kubernetes.io/projected/d87b4018-6394-41bc-a694-4176f5eb45d5-kube-api-access-hhmg2\") pod \"d87b4018-6394-41bc-a694-4176f5eb45d5\" (UID: \"d87b4018-6394-41bc-a694-4176f5eb45d5\") " Dec 05 18:21:33 crc kubenswrapper[4753]: I1205 18:21:33.856885 4753 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/d87b4018-6394-41bc-a694-4176f5eb45d5-host\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:33 crc kubenswrapper[4753]: I1205 18:21:33.861710 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d87b4018-6394-41bc-a694-4176f5eb45d5-kube-api-access-hhmg2" (OuterVolumeSpecName: "kube-api-access-hhmg2") pod "d87b4018-6394-41bc-a694-4176f5eb45d5" (UID: "d87b4018-6394-41bc-a694-4176f5eb45d5"). InnerVolumeSpecName "kube-api-access-hhmg2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:21:33 crc kubenswrapper[4753]: I1205 18:21:33.958299 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhmg2\" (UniqueName: \"kubernetes.io/projected/d87b4018-6394-41bc-a694-4176f5eb45d5-kube-api-access-hhmg2\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.321845 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6f52161ea6fbe64a0999305f8073f41795a93aec71e4cf20d7940cf0ee7ddf4a" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.321927 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-6bwgj" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.929815 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-grnvs/crc-debug-b224x"] Dec 05 18:21:34 crc kubenswrapper[4753]: E1205 18:21:34.930467 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerName="registry-server" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.930486 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerName="registry-server" Dec 05 18:21:34 crc kubenswrapper[4753]: E1205 18:21:34.930501 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d87b4018-6394-41bc-a694-4176f5eb45d5" containerName="container-00" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.930507 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d87b4018-6394-41bc-a694-4176f5eb45d5" containerName="container-00" Dec 05 18:21:34 crc kubenswrapper[4753]: E1205 18:21:34.930519 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerName="extract-utilities" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.930525 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerName="extract-utilities" Dec 05 18:21:34 crc kubenswrapper[4753]: E1205 18:21:34.930537 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerName="extract-content" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.930542 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerName="extract-content" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.930748 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d87b4018-6394-41bc-a694-4176f5eb45d5" containerName="container-00" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.930773 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="91be9ce3-8a31-4289-9bb4-1645b59233ca" containerName="registry-server" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.931487 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.933453 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-grnvs"/"default-dockercfg-qccnq" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.986542 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8h9t\" (UniqueName: \"kubernetes.io/projected/347d6d51-1886-4307-8402-854335c46eae-kube-api-access-r8h9t\") pod \"crc-debug-b224x\" (UID: \"347d6d51-1886-4307-8402-854335c46eae\") " pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:34 crc kubenswrapper[4753]: I1205 18:21:34.986698 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/347d6d51-1886-4307-8402-854335c46eae-host\") pod \"crc-debug-b224x\" (UID: \"347d6d51-1886-4307-8402-854335c46eae\") " pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:35 crc kubenswrapper[4753]: I1205 18:21:35.088928 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8h9t\" (UniqueName: \"kubernetes.io/projected/347d6d51-1886-4307-8402-854335c46eae-kube-api-access-r8h9t\") pod \"crc-debug-b224x\" (UID: \"347d6d51-1886-4307-8402-854335c46eae\") " pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:35 crc kubenswrapper[4753]: I1205 18:21:35.089361 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/347d6d51-1886-4307-8402-854335c46eae-host\") pod \"crc-debug-b224x\" (UID: \"347d6d51-1886-4307-8402-854335c46eae\") " pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:35 crc kubenswrapper[4753]: I1205 18:21:35.089676 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/347d6d51-1886-4307-8402-854335c46eae-host\") pod \"crc-debug-b224x\" (UID: \"347d6d51-1886-4307-8402-854335c46eae\") " pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:35 crc kubenswrapper[4753]: I1205 18:21:35.109183 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8h9t\" (UniqueName: \"kubernetes.io/projected/347d6d51-1886-4307-8402-854335c46eae-kube-api-access-r8h9t\") pod \"crc-debug-b224x\" (UID: \"347d6d51-1886-4307-8402-854335c46eae\") " pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:35 crc kubenswrapper[4753]: I1205 18:21:35.256501 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:35 crc kubenswrapper[4753]: I1205 18:21:35.336231 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/crc-debug-b224x" event={"ID":"347d6d51-1886-4307-8402-854335c46eae","Type":"ContainerStarted","Data":"22c5129b84d26ddfcb38779baa368dd031577bfada7259cfe04674c7f9a674da"} Dec 05 18:21:35 crc kubenswrapper[4753]: I1205 18:21:35.735338 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d87b4018-6394-41bc-a694-4176f5eb45d5" path="/var/lib/kubelet/pods/d87b4018-6394-41bc-a694-4176f5eb45d5/volumes" Dec 05 18:21:36 crc kubenswrapper[4753]: I1205 18:21:36.350794 4753 generic.go:334] "Generic (PLEG): container finished" podID="347d6d51-1886-4307-8402-854335c46eae" containerID="f3b0d6222c31c9bd109f465c18e9096b728f1237d0f3b3af0dd661033a4ead00" exitCode=0 Dec 05 18:21:36 crc kubenswrapper[4753]: I1205 18:21:36.350843 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/crc-debug-b224x" event={"ID":"347d6d51-1886-4307-8402-854335c46eae","Type":"ContainerDied","Data":"f3b0d6222c31c9bd109f465c18e9096b728f1237d0f3b3af0dd661033a4ead00"} Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.327248 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-grnvs/crc-debug-b224x"] Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.348284 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-grnvs/crc-debug-b224x"] Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.506375 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.540318 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8h9t\" (UniqueName: \"kubernetes.io/projected/347d6d51-1886-4307-8402-854335c46eae-kube-api-access-r8h9t\") pod \"347d6d51-1886-4307-8402-854335c46eae\" (UID: \"347d6d51-1886-4307-8402-854335c46eae\") " Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.540383 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/347d6d51-1886-4307-8402-854335c46eae-host\") pod \"347d6d51-1886-4307-8402-854335c46eae\" (UID: \"347d6d51-1886-4307-8402-854335c46eae\") " Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.541071 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/347d6d51-1886-4307-8402-854335c46eae-host" (OuterVolumeSpecName: "host") pod "347d6d51-1886-4307-8402-854335c46eae" (UID: "347d6d51-1886-4307-8402-854335c46eae"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.552386 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/347d6d51-1886-4307-8402-854335c46eae-kube-api-access-r8h9t" (OuterVolumeSpecName: "kube-api-access-r8h9t") pod "347d6d51-1886-4307-8402-854335c46eae" (UID: "347d6d51-1886-4307-8402-854335c46eae"). InnerVolumeSpecName "kube-api-access-r8h9t". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.643429 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8h9t\" (UniqueName: \"kubernetes.io/projected/347d6d51-1886-4307-8402-854335c46eae-kube-api-access-r8h9t\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.643460 4753 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/347d6d51-1886-4307-8402-854335c46eae-host\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:37 crc kubenswrapper[4753]: I1205 18:21:37.731236 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="347d6d51-1886-4307-8402-854335c46eae" path="/var/lib/kubelet/pods/347d6d51-1886-4307-8402-854335c46eae/volumes" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.374706 4753 scope.go:117] "RemoveContainer" containerID="f3b0d6222c31c9bd109f465c18e9096b728f1237d0f3b3af0dd661033a4ead00" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.374748 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-b224x" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.823531 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-grnvs/crc-debug-jsbl2"] Dec 05 18:21:38 crc kubenswrapper[4753]: E1205 18:21:38.824208 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="347d6d51-1886-4307-8402-854335c46eae" containerName="container-00" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.824223 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="347d6d51-1886-4307-8402-854335c46eae" containerName="container-00" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.824459 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="347d6d51-1886-4307-8402-854335c46eae" containerName="container-00" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.825201 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-jsbl2" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.826858 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-grnvs"/"default-dockercfg-qccnq" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.864211 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-host\") pod \"crc-debug-jsbl2\" (UID: \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\") " pod="openshift-must-gather-grnvs/crc-debug-jsbl2" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.864267 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfgnp\" (UniqueName: \"kubernetes.io/projected/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-kube-api-access-nfgnp\") pod \"crc-debug-jsbl2\" (UID: \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\") " pod="openshift-must-gather-grnvs/crc-debug-jsbl2" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.966744 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-host\") pod \"crc-debug-jsbl2\" (UID: \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\") " pod="openshift-must-gather-grnvs/crc-debug-jsbl2" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.966816 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfgnp\" (UniqueName: \"kubernetes.io/projected/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-kube-api-access-nfgnp\") pod \"crc-debug-jsbl2\" (UID: \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\") " pod="openshift-must-gather-grnvs/crc-debug-jsbl2" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.966883 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-host\") pod \"crc-debug-jsbl2\" (UID: \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\") " pod="openshift-must-gather-grnvs/crc-debug-jsbl2" Dec 05 18:21:38 crc kubenswrapper[4753]: I1205 18:21:38.986511 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfgnp\" (UniqueName: \"kubernetes.io/projected/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-kube-api-access-nfgnp\") pod \"crc-debug-jsbl2\" (UID: \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\") " pod="openshift-must-gather-grnvs/crc-debug-jsbl2" Dec 05 18:21:39 crc kubenswrapper[4753]: I1205 18:21:39.140882 4753 util.go:30] "No sandbox for pod can be found. 
Dec 05 18:21:39 crc kubenswrapper[4753]: W1205 18:21:39.193010 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0fd895e3_88fb_4ec2_afc3_09ff5b6b13f5.slice/crio-b449926f8dd05a7fcdcce610cd72330bb72117cc188e2b3b9a77819e4b40bb52 WatchSource:0}: Error finding container b449926f8dd05a7fcdcce610cd72330bb72117cc188e2b3b9a77819e4b40bb52: Status 404 returned error can't find the container with id b449926f8dd05a7fcdcce610cd72330bb72117cc188e2b3b9a77819e4b40bb52
Dec 05 18:21:39 crc kubenswrapper[4753]: I1205 18:21:39.389181 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/crc-debug-jsbl2" event={"ID":"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5","Type":"ContainerStarted","Data":"b449926f8dd05a7fcdcce610cd72330bb72117cc188e2b3b9a77819e4b40bb52"}
Dec 05 18:21:40 crc kubenswrapper[4753]: I1205 18:21:40.398704 4753 generic.go:334] "Generic (PLEG): container finished" podID="0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5" containerID="bdc134715163ef438916da73989f969067435a815d2a6904d37e00b59f28d470" exitCode=0
Dec 05 18:21:40 crc kubenswrapper[4753]: I1205 18:21:40.398973 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/crc-debug-jsbl2" event={"ID":"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5","Type":"ContainerDied","Data":"bdc134715163ef438916da73989f969067435a815d2a6904d37e00b59f28d470"}
Dec 05 18:21:40 crc kubenswrapper[4753]: I1205 18:21:40.458468 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-grnvs/crc-debug-jsbl2"]
Dec 05 18:21:40 crc kubenswrapper[4753]: I1205 18:21:40.475519 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-grnvs/crc-debug-jsbl2"]
Dec 05 18:21:41 crc kubenswrapper[4753]: I1205 18:21:41.549098 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-jsbl2"
Dec 05 18:21:41 crc kubenswrapper[4753]: I1205 18:21:41.642808 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-host\") pod \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\" (UID: \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\") "
Dec 05 18:21:41 crc kubenswrapper[4753]: I1205 18:21:41.642983 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-host" (OuterVolumeSpecName: "host") pod "0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5" (UID: "0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 18:21:41 crc kubenswrapper[4753]: I1205 18:21:41.643026 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nfgnp\" (UniqueName: \"kubernetes.io/projected/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-kube-api-access-nfgnp\") pod \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\" (UID: \"0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5\") "
Dec 05 18:21:41 crc kubenswrapper[4753]: I1205 18:21:41.643440 4753 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-host\") on node \"crc\" DevicePath \"\""
Dec 05 18:21:41 crc kubenswrapper[4753]: I1205 18:21:41.658410 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-kube-api-access-nfgnp" (OuterVolumeSpecName: "kube-api-access-nfgnp") pod "0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5" (UID: "0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5"). InnerVolumeSpecName "kube-api-access-nfgnp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:21:41 crc kubenswrapper[4753]: I1205 18:21:41.738651 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5" path="/var/lib/kubelet/pods/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5/volumes"
Dec 05 18:21:41 crc kubenswrapper[4753]: I1205 18:21:41.749953 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nfgnp\" (UniqueName: \"kubernetes.io/projected/0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5-kube-api-access-nfgnp\") on node \"crc\" DevicePath \"\""
Dec 05 18:21:42 crc kubenswrapper[4753]: I1205 18:21:42.421779 4753 scope.go:117] "RemoveContainer" containerID="bdc134715163ef438916da73989f969067435a815d2a6904d37e00b59f28d470"
Dec 05 18:21:42 crc kubenswrapper[4753]: I1205 18:21:42.421828 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/crc-debug-jsbl2"
Dec 05 18:21:44 crc kubenswrapper[4753]: I1205 18:21:44.720872 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"
Dec 05 18:21:44 crc kubenswrapper[4753]: E1205 18:21:44.724901 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:21:59 crc kubenswrapper[4753]: I1205 18:21:59.721172 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"
Dec 05 18:21:59 crc kubenswrapper[4753]: E1205 18:21:59.722042 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:22:11 crc kubenswrapper[4753]: I1205 18:22:11.726354 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"
Dec 05 18:22:11 crc kubenswrapper[4753]: E1205 18:22:11.727017 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:22:22 crc kubenswrapper[4753]: I1205 18:22:22.720541 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"
Dec 05 18:22:22 crc kubenswrapper[4753]: E1205 18:22:22.721315 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:22:30 crc kubenswrapper[4753]: I1205 18:22:30.812030 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f409c39c-6d5a-4950-bd92-2ab8a26ad831/init-config-reloader/0.log"
Dec 05 18:22:31 crc kubenswrapper[4753]: I1205 18:22:31.086993 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f409c39c-6d5a-4950-bd92-2ab8a26ad831/init-config-reloader/0.log"
Dec 05 18:22:31 crc kubenswrapper[4753]: I1205 18:22:31.091862 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f409c39c-6d5a-4950-bd92-2ab8a26ad831/alertmanager/0.log"
Dec 05 18:22:31 crc kubenswrapper[4753]: I1205 18:22:31.106550 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_alertmanager-metric-storage-0_f409c39c-6d5a-4950-bd92-2ab8a26ad831/config-reloader/0.log"
Dec 05 18:22:31 crc kubenswrapper[4753]: I1205 18:22:31.308163 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f996df5b6-dlvm2_2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013/barbican-api-log/0.log"
Dec 05 18:22:31 crc kubenswrapper[4753]: I1205 18:22:31.355334 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-7f996df5b6-dlvm2_2f3c34d4-e5fc-41ba-a8f0-3a5df24c9013/barbican-api/0.log"
Dec 05 18:22:31 crc kubenswrapper[4753]: I1205 18:22:31.369586 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58b544895d-h2wcr_c272889e-62f7-4ce2-8a38-e15945d984d9/barbican-keystone-listener/0.log"
Dec 05 18:22:31 crc kubenswrapper[4753]: I1205 18:22:31.580467 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-58b544895d-h2wcr_c272889e-62f7-4ce2-8a38-e15945d984d9/barbican-keystone-listener-log/0.log"
Dec 05 18:22:31 crc kubenswrapper[4753]: I1205 18:22:31.584078 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-69b574cd87-g8xmw_491b4a49-e02d-41a2-b783-b3dddbedbc57/barbican-worker/0.log"
Dec 05 18:22:31 crc kubenswrapper[4753]: I1205 18:22:31.796187 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-69b574cd87-g8xmw_491b4a49-e02d-41a2-b783-b3dddbedbc57/barbican-worker-log/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.036621 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-fzcvn_a40f9ec8-5379-4355-b524-fed440fdf2d6/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.132136 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7/ceilometer-central-agent/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.211340 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7/ceilometer-notification-agent/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.254553 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7/proxy-httpd/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.314998 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_5e8ed50a-c2d1-47b1-92e2-db51cc75a4c7/sg-core/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.468505 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8adad7e9-de7d-440a-9ac9-55882e2fd944/cinder-api-log/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.511436 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_8adad7e9-de7d-440a-9ac9-55882e2fd944/cinder-api/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.678011 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_802deda2-7602-46a4-b4d0-25cd167bbdf2/cinder-scheduler/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.771128 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_802deda2-7602-46a4-b4d0-25cd167bbdf2/probe/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.895915 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_5f627cef-fbe5-40ef-beeb-e3b08861c449/cloudkitty-api-log/0.log"
Dec 05 18:22:32 crc kubenswrapper[4753]: I1205 18:22:32.937070 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-api-0_5f627cef-fbe5-40ef-beeb-e3b08861c449/cloudkitty-api/0.log"
Dec 05 18:22:33 crc kubenswrapper[4753]: I1205 18:22:33.143281 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-compactor-0_55b8a3d1-6aee-4a8d-8b7e-5f69ed46970b/loki-compactor/0.log"
Dec 05 18:22:33 crc kubenswrapper[4753]: I1205 18:22:33.162594 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-distributor-664b687b54-jj67d_16d583e9-9ea0-4222-a38a-f8e1be33cdae/loki-distributor/0.log"
Dec 05 18:22:33 crc kubenswrapper[4753]: I1205 18:22:33.304768 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-8prh2_389b7205-589e-4027-ae02-ba2287c7e0ed/gateway/0.log"
Dec 05 18:22:33 crc kubenswrapper[4753]: I1205 18:22:33.379393 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-gateway-bc75944f-pqqjw_8f82a8e6-b07e-4bf9-801e-04c1f96fe703/gateway/0.log"
Dec 05 18:22:33 crc kubenswrapper[4753]: I1205 18:22:33.593090 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-index-gateway-0_93568770-efee-4906-b491-17d0664bfa8b/loki-index-gateway/0.log"
Dec 05 18:22:33 crc kubenswrapper[4753]: I1205 18:22:33.728174 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78"
Dec 05 18:22:33 crc kubenswrapper[4753]: E1205 18:22:33.728591 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68"
Dec 05 18:22:33 crc kubenswrapper[4753]: I1205 18:22:33.872086 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-ingester-0_227cc7e4-602f-4c1e-afa7-0e106d3f505f/loki-ingester/0.log"
Dec 05 18:22:34 crc kubenswrapper[4753]: I1205 18:22:34.108536 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-query-frontend-7c8cd744d9-vh5xk_10df4aa4-d920-45d4-9592-72c32d59c312/loki-query-frontend/0.log"
Dec 05 18:22:34 crc kubenswrapper[4753]: I1205 18:22:34.429384 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-nhc8m_9e4d29b1-9d77-4744-85ed-e6882651cea9/configure-network-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:22:34 crc kubenswrapper[4753]: I1205 18:22:34.626679 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-sgwrx_476af6d7-20f2-4345-96ad-219ab22e904b/configure-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:22:34 crc kubenswrapper[4753]: I1205 18:22:34.649983 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-nfqd8_d81db6a5-e580-4e70-92bb-437f1c03f5b4/init/0.log"
Dec 05 18:22:34 crc kubenswrapper[4753]: I1205 18:22:34.754228 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-lokistack-querier-5467947bf7-7l6pk_89ac2139-b38d-40b1-939d-b23748c819d0/loki-querier/0.log"
Dec 05 18:22:35 crc kubenswrapper[4753]: I1205 18:22:35.024368 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-nfqd8_d81db6a5-e580-4e70-92bb-437f1c03f5b4/init/0.log"
Dec 05 18:22:35 crc kubenswrapper[4753]: I1205 18:22:35.610033 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-5475ccd585-nfqd8_d81db6a5-e580-4e70-92bb-437f1c03f5b4/dnsmasq-dns/0.log"
Dec 05 18:22:35 crc kubenswrapper[4753]: I1205 18:22:35.696919 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-2drf7_825da353-e856-45ac-9cff-027d1f16663a/download-cache-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:22:35 crc kubenswrapper[4753]: I1205 18:22:35.851370 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_187c344a-5fdf-47db-b103-de9458e6a58a/glance-httpd/0.log"
Dec 05 18:22:35 crc kubenswrapper[4753]: I1205 18:22:35.904760 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_187c344a-5fdf-47db-b103-de9458e6a58a/glance-log/0.log"
Dec 05 18:22:35 crc kubenswrapper[4753]: I1205 18:22:35.934380 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_33797bcf-e2f2-4a28-8148-3e027fc342d8/glance-httpd/0.log"
Dec 05 18:22:36 crc kubenswrapper[4753]: I1205 18:22:36.075738 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_33797bcf-e2f2-4a28-8148-3e027fc342d8/glance-log/0.log"
Dec 05 18:22:36 crc kubenswrapper[4753]: I1205 18:22:36.119109 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-2c89w_3e9951b8-30f1-4aea-947e-d69fcb39bdcf/install-certs-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:22:36 crc kubenswrapper[4753]: I1205 18:22:36.403077 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-7gbc5_5678941d-59cd-487e-82a4-f2cf0bf528a7/install-os-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:22:36 crc kubenswrapper[4753]: I1205 18:22:36.678910 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415961-8rgrd_487332b8-9414-4d94-b52e-9deb57aaf729/keystone-cron/0.log"
Dec 05 18:22:36 crc kubenswrapper[4753]: I1205 18:22:36.819352 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-578b67ddb8-fsb8m_4e39f06b-3be0-4d99-a8b8-627de083ff81/keystone-api/0.log"
Dec 05 18:22:37 crc kubenswrapper[4753]: I1205 18:22:37.035662 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_37b1ed0b-5977-4294-b5c4-0d9d0abd6520/kube-state-metrics/0.log"
Dec 05 18:22:37 crc kubenswrapper[4753]: I1205 18:22:37.215482 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-c9d6z_d4e88c75-6ff8-45d0-ba9e-124b2acf1ba5/libvirt-edpm-deployment-openstack-edpm-ipam/0.log"
Dec 05 18:22:37 crc kubenswrapper[4753]: I1205 18:22:37.563902 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d4f9bcbff-wqwgp_a2d7ec08-5ff8-4470-a4a7-2d830d3f5261/neutron-httpd/0.log"
path="/var/log/pods/openstack_neutron-6d4f9bcbff-wqwgp_a2d7ec08-5ff8-4470-a4a7-2d830d3f5261/neutron-httpd/0.log" Dec 05 18:22:37 crc kubenswrapper[4753]: I1205 18:22:37.599882 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d4f9bcbff-wqwgp_a2d7ec08-5ff8-4470-a4a7-2d830d3f5261/neutron-api/0.log" Dec 05 18:22:37 crc kubenswrapper[4753]: I1205 18:22:37.754628 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-7l2pf_ea5f795b-6ef0-4281-a619-1a89b547e436/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:22:38 crc kubenswrapper[4753]: I1205 18:22:38.367640 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3aaf728a-8d40-4b45-9f79-a5bb36ee9a57/nova-api-log/0.log" Dec 05 18:22:38 crc kubenswrapper[4753]: I1205 18:22:38.618961 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_0295546c-bece-4b19-ae35-8188830dab3b/nova-cell0-conductor-conductor/0.log" Dec 05 18:22:38 crc kubenswrapper[4753]: I1205 18:22:38.850399 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_3aaf728a-8d40-4b45-9f79-a5bb36ee9a57/nova-api-api/0.log" Dec 05 18:22:39 crc kubenswrapper[4753]: I1205 18:22:39.007010 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_696fed1a-38d3-459f-b08b-128b8d41d472/nova-cell1-conductor-conductor/0.log" Dec 05 18:22:39 crc kubenswrapper[4753]: I1205 18:22:39.166407 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_059b378d-55a3-4652-96ac-804b19815e8d/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 18:22:39 crc kubenswrapper[4753]: I1205 18:22:39.283250 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-tcnxd_352d3fb5-7ce3-4d4c-8c1a-1a9adf0ef1b1/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:22:39 crc kubenswrapper[4753]: I1205 18:22:39.484557 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f56a1bea-c258-4ed6-a43c-2d006aaa4a23/nova-metadata-log/0.log" Dec 05 18:22:39 crc kubenswrapper[4753]: I1205 18:22:39.995055 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_1f6bb960-f1d1-413e-bd11-aa0d1251135f/nova-scheduler-scheduler/0.log" Dec 05 18:22:40 crc kubenswrapper[4753]: I1205 18:22:40.037126 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c8d8a7a-38bd-49d9-8f25-5495c32462bc/mysql-bootstrap/0.log" Dec 05 18:22:40 crc kubenswrapper[4753]: I1205 18:22:40.182026 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c8d8a7a-38bd-49d9-8f25-5495c32462bc/mysql-bootstrap/0.log" Dec 05 18:22:40 crc kubenswrapper[4753]: I1205 18:22:40.233410 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_4c8d8a7a-38bd-49d9-8f25-5495c32462bc/galera/0.log" Dec 05 18:22:40 crc kubenswrapper[4753]: I1205 18:22:40.438378 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9df69769-e394-444f-b6e2-e788e989fe92/mysql-bootstrap/0.log" Dec 05 18:22:40 crc kubenswrapper[4753]: I1205 18:22:40.696737 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9df69769-e394-444f-b6e2-e788e989fe92/mysql-bootstrap/0.log" Dec 05 18:22:40 
crc kubenswrapper[4753]: I1205 18:22:40.705929 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_9df69769-e394-444f-b6e2-e788e989fe92/galera/0.log" Dec 05 18:22:40 crc kubenswrapper[4753]: I1205 18:22:40.748074 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cloudkitty-proc-0_11174666-3e97-47ce-90f5-55ee37dddf75/cloudkitty-proc/0.log" Dec 05 18:22:40 crc kubenswrapper[4753]: I1205 18:22:40.890636 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_1917fbcd-3d32-4ceb-aeab-1119aa3d4771/openstackclient/0.log" Dec 05 18:22:41 crc kubenswrapper[4753]: I1205 18:22:41.107421 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-8m7cw_9f33836c-96c6-4da3-b2d6-e9c12631f2b4/ovn-controller/0.log" Dec 05 18:22:41 crc kubenswrapper[4753]: I1205 18:22:41.117720 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_f56a1bea-c258-4ed6-a43c-2d006aaa4a23/nova-metadata-metadata/0.log" Dec 05 18:22:41 crc kubenswrapper[4753]: I1205 18:22:41.314080 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-sp59x_85fb40be-ac2e-404f-912b-2831ae6eb795/openstack-network-exporter/0.log" Dec 05 18:22:41 crc kubenswrapper[4753]: I1205 18:22:41.321163 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qhphp_284db7f3-ca89-447a-90eb-487d43e49f7d/ovsdb-server-init/0.log" Dec 05 18:22:41 crc kubenswrapper[4753]: I1205 18:22:41.614234 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qhphp_284db7f3-ca89-447a-90eb-487d43e49f7d/ovs-vswitchd/0.log" Dec 05 18:22:41 crc kubenswrapper[4753]: I1205 18:22:41.647810 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qhphp_284db7f3-ca89-447a-90eb-487d43e49f7d/ovsdb-server-init/0.log" Dec 05 18:22:41 crc kubenswrapper[4753]: I1205 18:22:41.699216 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-qhphp_284db7f3-ca89-447a-90eb-487d43e49f7d/ovsdb-server/0.log" Dec 05 18:22:41 crc kubenswrapper[4753]: I1205 18:22:41.859372 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-kx88v_e2f8ca40-16d5-4a17-80a1-f5bf12f92d71/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:22:41 crc kubenswrapper[4753]: I1205 18:22:41.904493 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a2c3f794-ac1f-4115-bf82-a43f3a487332/openstack-network-exporter/0.log" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.087527 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_a2c3f794-ac1f-4115-bf82-a43f3a487332/ovn-northd/0.log" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.089169 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-62kl4"] Dec 05 18:22:42 crc kubenswrapper[4753]: E1205 18:22:42.089642 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5" containerName="container-00" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.089659 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5" containerName="container-00" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.089892 4753 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="0fd895e3-88fb-4ec2-afc3-09ff5b6b13f5" containerName="container-00" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.091677 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.116431 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-62kl4"] Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.166951 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f8c13e8e-fec9-49e2-a2b0-5ca0473d2469/ovsdbserver-nb/0.log" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.217591 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_f8c13e8e-fec9-49e2-a2b0-5ca0473d2469/openstack-network-exporter/0.log" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.250516 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pswn7\" (UniqueName: \"kubernetes.io/projected/716b6ff9-df22-40ec-9c61-095d7d43a302-kube-api-access-pswn7\") pod \"certified-operators-62kl4\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.250615 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-catalog-content\") pod \"certified-operators-62kl4\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.250727 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-utilities\") pod \"certified-operators-62kl4\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.351888 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-utilities\") pod \"certified-operators-62kl4\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.351962 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pswn7\" (UniqueName: \"kubernetes.io/projected/716b6ff9-df22-40ec-9c61-095d7d43a302-kube-api-access-pswn7\") pod \"certified-operators-62kl4\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.352040 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-catalog-content\") pod \"certified-operators-62kl4\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.352402 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-utilities\") pod \"certified-operators-62kl4\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.352459 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-catalog-content\") pod \"certified-operators-62kl4\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.373107 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pswn7\" (UniqueName: \"kubernetes.io/projected/716b6ff9-df22-40ec-9c61-095d7d43a302-kube-api-access-pswn7\") pod \"certified-operators-62kl4\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.444333 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_09e6b220-0a70-4359-93f4-4450b2e458c8/openstack-network-exporter/0.log" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.445313 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.495208 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_09e6b220-0a70-4359-93f4-4450b2e458c8/ovsdbserver-sb/0.log" Dec 05 18:22:42 crc kubenswrapper[4753]: I1205 18:22:42.935004 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-56466dc556-cvwd4_2b9f9c99-c7ba-4689-8218-f61fecf29867/placement-api/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.024852 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-56466dc556-cvwd4_2b9f9c99-c7ba-4689-8218-f61fecf29867/placement-log/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.074583 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-62kl4"] Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.096700 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-62kl4" event={"ID":"716b6ff9-df22-40ec-9c61-095d7d43a302","Type":"ContainerStarted","Data":"37de4168d3305dcd0b6b402eaf472c080ac77668b7cfda7de6f015991fd9f2de"} Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.159413 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/init-config-reloader/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.335263 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/init-config-reloader/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.377103 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/config-reloader/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.467525 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/thanos-sidecar/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.469515 4753 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_prometheus-metric-storage-0_b4a8b3a8-c966-41e1-bb1e-a054e3c3e189/prometheus/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.614990 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7a1f2600-cf85-45c5-8263-89810b0ba7ce/setup-container/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.804790 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7a1f2600-cf85-45c5-8263-89810b0ba7ce/setup-container/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.846935 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2bbb2b1a-5cf9-497c-9471-13ba1314167b/setup-container/0.log" Dec 05 18:22:43 crc kubenswrapper[4753]: I1205 18:22:43.902841 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_7a1f2600-cf85-45c5-8263-89810b0ba7ce/rabbitmq/0.log" Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.049398 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2bbb2b1a-5cf9-497c-9471-13ba1314167b/setup-container/0.log" Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.082923 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_2bbb2b1a-5cf9-497c-9471-13ba1314167b/rabbitmq/0.log" Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.122107 4753 generic.go:334] "Generic (PLEG): container finished" podID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerID="8674dba6742181e4c8a8f69d60351149802eceacd9f1e4e1024a194e819d91e0" exitCode=0 Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.122197 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-62kl4" event={"ID":"716b6ff9-df22-40ec-9c61-095d7d43a302","Type":"ContainerDied","Data":"8674dba6742181e4c8a8f69d60351149802eceacd9f1e4e1024a194e819d91e0"} Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.166848 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-qwwlp_87333285-bec9-4c68-b2f7-307fee899fe4/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.387936 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-k2657_228dc9b7-ee9e-48b8-bf86-f4265863f94a/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.429219 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-vjwb2_d4b7df21-a189-41c8-9e93-c43d0eb552c5/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.596505 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-6z7rw_21886288-cce7-4e89-8c64-e4f06623f8f3/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.660965 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-lwtd6_7e1f8581-434d-4c92-aa86-f76aa242a2e2/ssh-known-hosts-edpm-deployment/0.log" Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.894592 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-proxy-6d854f58c-mvlpx_fc5495c5-a8e0-46c4-82c8-be930b187322/proxy-server/0.log" Dec 05 18:22:44 crc kubenswrapper[4753]: I1205 18:22:44.981960 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-6d854f58c-mvlpx_fc5495c5-a8e0-46c4-82c8-be930b187322/proxy-httpd/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.000859 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-z4g8x_3ee0c490-5f41-4e0b-8c99-841b1fbbe5e2/swift-ring-rebalance/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.141129 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-62kl4" event={"ID":"716b6ff9-df22-40ec-9c61-095d7d43a302","Type":"ContainerStarted","Data":"2781a2e6d684b481d79393592fe10e131f7e2b1711118afe5519225351df8ec4"} Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.267870 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/account-reaper/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.290643 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/account-auditor/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.329517 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/account-replicator/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.502845 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/account-server/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.547388 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/container-auditor/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.598998 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/container-replicator/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.676682 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/container-server/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.685495 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/container-updater/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.792306 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-auditor/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.822878 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-expirer/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.907441 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-replicator/0.log" Dec 05 18:22:45 crc kubenswrapper[4753]: I1205 18:22:45.937609 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-server/0.log" Dec 05 18:22:46 crc kubenswrapper[4753]: I1205 18:22:46.024680 4753 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/object-updater/0.log" Dec 05 18:22:46 crc kubenswrapper[4753]: I1205 18:22:46.046869 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/rsync/0.log" Dec 05 18:22:46 crc kubenswrapper[4753]: I1205 18:22:46.108548 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_f4707e97-4f70-42d5-959e-1d2c8a9629e5/swift-recon-cron/0.log" Dec 05 18:22:46 crc kubenswrapper[4753]: I1205 18:22:46.166622 4753 generic.go:334] "Generic (PLEG): container finished" podID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerID="2781a2e6d684b481d79393592fe10e131f7e2b1711118afe5519225351df8ec4" exitCode=0 Dec 05 18:22:46 crc kubenswrapper[4753]: I1205 18:22:46.166869 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-62kl4" event={"ID":"716b6ff9-df22-40ec-9c61-095d7d43a302","Type":"ContainerDied","Data":"2781a2e6d684b481d79393592fe10e131f7e2b1711118afe5519225351df8ec4"} Dec 05 18:22:46 crc kubenswrapper[4753]: I1205 18:22:46.343176 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-bq7tl_9b316eb5-2fa8-4582-afdd-0b94dfe44a6f/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:22:46 crc kubenswrapper[4753]: I1205 18:22:46.422216 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_230fdd26-f37e-4a32-a261-efdb39dc8de2/tempest-tests-tempest-tests-runner/0.log" Dec 05 18:22:46 crc kubenswrapper[4753]: I1205 18:22:46.558412 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_2c8ba284-0ce6-4fd1-af48-9953b5b14d55/test-operator-logs-container/0.log" Dec 05 18:22:46 crc kubenswrapper[4753]: I1205 18:22:46.682218 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-ngt2w_c239ce8f-d247-46bb-889b-914ff6f8ab64/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:22:47 crc kubenswrapper[4753]: I1205 18:22:47.181754 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-62kl4" event={"ID":"716b6ff9-df22-40ec-9c61-095d7d43a302","Type":"ContainerStarted","Data":"c9112a544289a4006df824305ba8f04a295d445c83710039afde5a5ad600c075"} Dec 05 18:22:47 crc kubenswrapper[4753]: I1205 18:22:47.203527 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-62kl4" podStartSLOduration=2.798415984 podStartE2EDuration="5.203511566s" podCreationTimestamp="2025-12-05 18:22:42 +0000 UTC" firstStartedPulling="2025-12-05 18:22:44.125181519 +0000 UTC m=+4702.628288525" lastFinishedPulling="2025-12-05 18:22:46.530277101 +0000 UTC m=+4705.033384107" observedRunningTime="2025-12-05 18:22:47.197766623 +0000 UTC m=+4705.700873629" watchObservedRunningTime="2025-12-05 18:22:47.203511566 +0000 UTC m=+4705.706618572" Dec 05 18:22:47 crc kubenswrapper[4753]: I1205 18:22:47.720519 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:22:47 crc kubenswrapper[4753]: E1205 18:22:47.721005 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting 
failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:22:52 crc kubenswrapper[4753]: I1205 18:22:52.393226 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_8978936d-d71e-4840-9cc4-666746ebeecf/memcached/0.log" Dec 05 18:22:52 crc kubenswrapper[4753]: I1205 18:22:52.446012 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:52 crc kubenswrapper[4753]: I1205 18:22:52.446282 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:52 crc kubenswrapper[4753]: I1205 18:22:52.502487 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:53 crc kubenswrapper[4753]: I1205 18:22:53.287973 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:53 crc kubenswrapper[4753]: I1205 18:22:53.331063 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-62kl4"] Dec 05 18:22:55 crc kubenswrapper[4753]: I1205 18:22:55.254530 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-62kl4" podUID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerName="registry-server" containerID="cri-o://c9112a544289a4006df824305ba8f04a295d445c83710039afde5a5ad600c075" gracePeriod=2 Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.266461 4753 generic.go:334] "Generic (PLEG): container finished" podID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerID="c9112a544289a4006df824305ba8f04a295d445c83710039afde5a5ad600c075" exitCode=0 Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.266507 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-62kl4" event={"ID":"716b6ff9-df22-40ec-9c61-095d7d43a302","Type":"ContainerDied","Data":"c9112a544289a4006df824305ba8f04a295d445c83710039afde5a5ad600c075"} Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.266832 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-62kl4" event={"ID":"716b6ff9-df22-40ec-9c61-095d7d43a302","Type":"ContainerDied","Data":"37de4168d3305dcd0b6b402eaf472c080ac77668b7cfda7de6f015991fd9f2de"} Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.266869 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37de4168d3305dcd0b6b402eaf472c080ac77668b7cfda7de6f015991fd9f2de" Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.368398 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.518926 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-utilities\") pod \"716b6ff9-df22-40ec-9c61-095d7d43a302\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.519419 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pswn7\" (UniqueName: \"kubernetes.io/projected/716b6ff9-df22-40ec-9c61-095d7d43a302-kube-api-access-pswn7\") pod \"716b6ff9-df22-40ec-9c61-095d7d43a302\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.519450 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-catalog-content\") pod \"716b6ff9-df22-40ec-9c61-095d7d43a302\" (UID: \"716b6ff9-df22-40ec-9c61-095d7d43a302\") " Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.519683 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-utilities" (OuterVolumeSpecName: "utilities") pod "716b6ff9-df22-40ec-9c61-095d7d43a302" (UID: "716b6ff9-df22-40ec-9c61-095d7d43a302"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.519970 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.524931 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/716b6ff9-df22-40ec-9c61-095d7d43a302-kube-api-access-pswn7" (OuterVolumeSpecName: "kube-api-access-pswn7") pod "716b6ff9-df22-40ec-9c61-095d7d43a302" (UID: "716b6ff9-df22-40ec-9c61-095d7d43a302"). InnerVolumeSpecName "kube-api-access-pswn7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.567248 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "716b6ff9-df22-40ec-9c61-095d7d43a302" (UID: "716b6ff9-df22-40ec-9c61-095d7d43a302"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.621356 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pswn7\" (UniqueName: \"kubernetes.io/projected/716b6ff9-df22-40ec-9c61-095d7d43a302-kube-api-access-pswn7\") on node \"crc\" DevicePath \"\"" Dec 05 18:22:56 crc kubenswrapper[4753]: I1205 18:22:56.621394 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/716b6ff9-df22-40ec-9c61-095d7d43a302-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:22:57 crc kubenswrapper[4753]: I1205 18:22:57.274787 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-62kl4" Dec 05 18:22:57 crc kubenswrapper[4753]: I1205 18:22:57.309540 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-62kl4"] Dec 05 18:22:57 crc kubenswrapper[4753]: I1205 18:22:57.320920 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-62kl4"] Dec 05 18:22:57 crc kubenswrapper[4753]: I1205 18:22:57.741465 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="716b6ff9-df22-40ec-9c61-095d7d43a302" path="/var/lib/kubelet/pods/716b6ff9-df22-40ec-9c61-095d7d43a302/volumes" Dec 05 18:22:59 crc kubenswrapper[4753]: I1205 18:22:59.720618 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:22:59 crc kubenswrapper[4753]: E1205 18:22:59.721217 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:23:12 crc kubenswrapper[4753]: I1205 18:23:12.720928 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:23:12 crc kubenswrapper[4753]: E1205 18:23:12.721610 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:23:16 crc kubenswrapper[4753]: I1205 18:23:16.665867 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/util/0.log" Dec 05 18:23:16 crc kubenswrapper[4753]: I1205 18:23:16.869770 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/util/0.log" Dec 05 18:23:16 crc kubenswrapper[4753]: I1205 18:23:16.895821 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/pull/0.log" Dec 05 18:23:16 crc kubenswrapper[4753]: I1205 18:23:16.923206 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/pull/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.043029 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/util/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.078414 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/pull/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.102976 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_10518271d11356773275a244d183abb577b9dea821837b29079f1397f09p4qq_e51268e8-feb1-4dcd-8eeb-4e81cc1ced69/extract/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.206060 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-k9647_304f4f1a-f42b-4904-9a77-9e26600eb591/kube-rbac-proxy/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.340591 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-pkzrf_6afd28dd-749e-409b-93ec-30cd85573a95/kube-rbac-proxy/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.361092 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-k9647_304f4f1a-f42b-4904-9a77-9e26600eb591/manager/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.434498 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-pkzrf_6afd28dd-749e-409b-93ec-30cd85573a95/manager/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.536637 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-hhh6x_a5f4bc41-be86-43bd-b9af-d8d8cfac644e/manager/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.540480 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-hhh6x_a5f4bc41-be86-43bd-b9af-d8d8cfac644e/kube-rbac-proxy/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.823862 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-g7c45_e485d825-a020-45b2-a642-bba12e1a5112/manager/0.log" Dec 05 18:23:17 crc kubenswrapper[4753]: I1205 18:23:17.836824 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-g7c45_e485d825-a020-45b2-a642-bba12e1a5112/kube-rbac-proxy/0.log" Dec 05 18:23:18 crc kubenswrapper[4753]: I1205 18:23:18.667510 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-n94z9_9472f12d-6c74-422c-8bc9-76a2ca161b77/kube-rbac-proxy/0.log" Dec 05 18:23:18 crc kubenswrapper[4753]: I1205 18:23:18.755000 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-n94z9_9472f12d-6c74-422c-8bc9-76a2ca161b77/manager/0.log" Dec 05 18:23:18 crc kubenswrapper[4753]: I1205 18:23:18.756383 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-9jsb5_9712e3f3-fe07-4f19-b04f-6736375fd440/kube-rbac-proxy/0.log" Dec 05 18:23:18 crc kubenswrapper[4753]: I1205 18:23:18.955975 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-9jsb5_9712e3f3-fe07-4f19-b04f-6736375fd440/manager/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.043040 4753 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-j6jtq_e2bab632-3631-4dcf-b337-12982b375999/kube-rbac-proxy/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.131720 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-j6jtq_e2bab632-3631-4dcf-b337-12982b375999/manager/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.167077 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-kj4zh_2d850458-6add-4a44-b1c6-7dba1e8993ab/kube-rbac-proxy/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.247909 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-kj4zh_2d850458-6add-4a44-b1c6-7dba1e8993ab/manager/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.345168 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-s6759_a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e/kube-rbac-proxy/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.426732 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-s6759_a3db9b5b-a4b2-40f5-93a0-84ecb72c1c2e/manager/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.551613 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-qpd7t_0d335585-bcd8-4ddf-a693-421d6d3bf6d2/manager/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.585930 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-qpd7t_0d335585-bcd8-4ddf-a693-421d6d3bf6d2/kube-rbac-proxy/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.621271 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-s2rgf_85fd7687-c296-460a-a2b2-3da36c97efe6/kube-rbac-proxy/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.750644 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-s2rgf_85fd7687-c296-460a-a2b2-3da36c97efe6/manager/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.826090 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-vcc5x_09be61f0-7174-4035-a8f9-315ca512dea4/manager/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.849073 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-vcc5x_09be61f0-7174-4035-a8f9-315ca512dea4/kube-rbac-proxy/0.log" Dec 05 18:23:19 crc kubenswrapper[4753]: I1205 18:23:19.976857 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-hf596_a4590a35-52c3-45a7-ba18-81d2db73c384/kube-rbac-proxy/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 18:23:20.038955 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-hf596_a4590a35-52c3-45a7-ba18-81d2db73c384/manager/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 
18:23:20.065845 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-72b7n_2f396259-4eaa-465d-9674-9999d750b1f6/kube-rbac-proxy/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 18:23:20.167250 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-72b7n_2f396259-4eaa-465d-9674-9999d750b1f6/manager/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 18:23:20.235856 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd462vzf_269f75ec-a232-47f3-8cc8-e9e4c8e9717d/kube-rbac-proxy/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 18:23:20.250274 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd462vzf_269f75ec-a232-47f3-8cc8-e9e4c8e9717d/manager/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 18:23:20.508956 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-5blfl_46c0a8e5-1a5d-4fd6-bf89-a91826a0b99f/registry-server/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 18:23:20.644139 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-5958697dc4-d8ztf_a970c0f5-9ad9-4bf9-b93b-5f6b72cca4ff/operator/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 18:23:20.709639 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-dhjvc_8e53ed9e-05c6-4a84-894d-85f427a53f72/kube-rbac-proxy/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 18:23:20.774503 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-dhjvc_8e53ed9e-05c6-4a84-894d-85f427a53f72/manager/0.log" Dec 05 18:23:20 crc kubenswrapper[4753]: I1205 18:23:20.895898 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-l5jmt_ebc7e3cf-3701-42f9-a6ca-43f11424a0b3/kube-rbac-proxy/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.021634 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-l5jmt_ebc7e3cf-3701-42f9-a6ca-43f11424a0b3/manager/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.176285 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-zl7c6_cca913d2-4dbb-4cd8-8575-4af52cc95501/operator/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.285115 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8fdbn_9e8cbdbf-2604-4e04-a56a-6f2175c09abe/kube-rbac-proxy/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.421612 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-8fdbn_9e8cbdbf-2604-4e04-a56a-6f2175c09abe/manager/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.494582 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6dd6c4f769-lq2pn_ac0841fe-3cba-4397-bfec-67a9cbec6861/manager/0.log" Dec 05 18:23:21 crc 
kubenswrapper[4753]: I1205 18:23:21.510514 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-75c997498-r5zvk_41186805-7b90-44a5-b6d6-fe4b6b4d9a79/kube-rbac-proxy/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.648591 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-p5j4k_444bb95c-a503-40a1-a99e-64d04b3c8930/kube-rbac-proxy/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.711316 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-p5j4k_444bb95c-a503-40a1-a99e-64d04b3c8930/manager/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.875951 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-7wzfq_7b847f69-4008-4acf-bddf-e0ea5a07b6bd/kube-rbac-proxy/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.880028 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-7wzfq_7b847f69-4008-4acf-bddf-e0ea5a07b6bd/manager/0.log" Dec 05 18:23:21 crc kubenswrapper[4753]: I1205 18:23:21.938645 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-75c997498-r5zvk_41186805-7b90-44a5-b6d6-fe4b6b4d9a79/manager/0.log" Dec 05 18:23:23 crc kubenswrapper[4753]: I1205 18:23:23.720905 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:23:23 crc kubenswrapper[4753]: E1205 18:23:23.721434 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:23:34 crc kubenswrapper[4753]: I1205 18:23:34.720934 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:23:34 crc kubenswrapper[4753]: E1205 18:23:34.721709 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:23:45 crc kubenswrapper[4753]: I1205 18:23:45.753421 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-pcb9b_27067136-5ba5-407b-a4a5-4d1e8c284564/control-plane-machine-set-operator/0.log" Dec 05 18:23:46 crc kubenswrapper[4753]: I1205 18:23:46.028438 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vx9s2_5cd4f96b-673e-4518-a8ee-da3ccb7a86b0/kube-rbac-proxy/0.log" Dec 05 18:23:46 crc kubenswrapper[4753]: I1205 18:23:46.173662 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-vx9s2_5cd4f96b-673e-4518-a8ee-da3ccb7a86b0/machine-api-operator/0.log" Dec 05 18:23:46 crc kubenswrapper[4753]: I1205 18:23:46.720687 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:23:46 crc kubenswrapper[4753]: E1205 18:23:46.721112 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.198493 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2q567"] Dec 05 18:23:52 crc kubenswrapper[4753]: E1205 18:23:52.199558 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerName="extract-utilities" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.199574 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerName="extract-utilities" Dec 05 18:23:52 crc kubenswrapper[4753]: E1205 18:23:52.199600 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerName="registry-server" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.199609 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerName="registry-server" Dec 05 18:23:52 crc kubenswrapper[4753]: E1205 18:23:52.199645 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerName="extract-content" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.199653 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerName="extract-content" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.199949 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="716b6ff9-df22-40ec-9c61-095d7d43a302" containerName="registry-server" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.202199 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.226058 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2q567"] Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.334677 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-catalog-content\") pod \"community-operators-2q567\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.334763 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-utilities\") pod \"community-operators-2q567\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.335265 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8bhz\" (UniqueName: \"kubernetes.io/projected/f123c5ee-338e-4af3-b1c7-6907b0dd7966-kube-api-access-x8bhz\") pod \"community-operators-2q567\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.437595 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-catalog-content\") pod \"community-operators-2q567\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.437668 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-utilities\") pod \"community-operators-2q567\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.437773 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8bhz\" (UniqueName: \"kubernetes.io/projected/f123c5ee-338e-4af3-b1c7-6907b0dd7966-kube-api-access-x8bhz\") pod \"community-operators-2q567\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.438535 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-catalog-content\") pod \"community-operators-2q567\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.438746 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-utilities\") pod \"community-operators-2q567\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.463958 4753 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-x8bhz\" (UniqueName: \"kubernetes.io/projected/f123c5ee-338e-4af3-b1c7-6907b0dd7966-kube-api-access-x8bhz\") pod \"community-operators-2q567\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:52 crc kubenswrapper[4753]: I1205 18:23:52.527329 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2q567" Dec 05 18:23:53 crc kubenswrapper[4753]: I1205 18:23:53.119695 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2q567"] Dec 05 18:23:53 crc kubenswrapper[4753]: I1205 18:23:53.871566 4753 generic.go:334] "Generic (PLEG): container finished" podID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerID="94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584" exitCode=0 Dec 05 18:23:53 crc kubenswrapper[4753]: I1205 18:23:53.871663 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2q567" event={"ID":"f123c5ee-338e-4af3-b1c7-6907b0dd7966","Type":"ContainerDied","Data":"94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584"} Dec 05 18:23:53 crc kubenswrapper[4753]: I1205 18:23:53.871875 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2q567" event={"ID":"f123c5ee-338e-4af3-b1c7-6907b0dd7966","Type":"ContainerStarted","Data":"25d46ccb38392bc730a3607d4ba51daa70397f9677f69ec6c719cb2c2af73f03"} Dec 05 18:23:54 crc kubenswrapper[4753]: I1205 18:23:54.883837 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2q567" event={"ID":"f123c5ee-338e-4af3-b1c7-6907b0dd7966","Type":"ContainerStarted","Data":"e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80"} Dec 05 18:23:55 crc kubenswrapper[4753]: I1205 18:23:55.892533 4753 generic.go:334] "Generic (PLEG): container finished" podID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerID="e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80" exitCode=0 Dec 05 18:23:55 crc kubenswrapper[4753]: I1205 18:23:55.892682 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2q567" event={"ID":"f123c5ee-338e-4af3-b1c7-6907b0dd7966","Type":"ContainerDied","Data":"e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80"} Dec 05 18:23:56 crc kubenswrapper[4753]: I1205 18:23:56.904145 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2q567" event={"ID":"f123c5ee-338e-4af3-b1c7-6907b0dd7966","Type":"ContainerStarted","Data":"3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023"} Dec 05 18:23:56 crc kubenswrapper[4753]: I1205 18:23:56.930572 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2q567" podStartSLOduration=2.507186357 podStartE2EDuration="4.930551558s" podCreationTimestamp="2025-12-05 18:23:52 +0000 UTC" firstStartedPulling="2025-12-05 18:23:53.873237586 +0000 UTC m=+4772.376344602" lastFinishedPulling="2025-12-05 18:23:56.296602797 +0000 UTC m=+4774.799709803" observedRunningTime="2025-12-05 18:23:56.92675755 +0000 UTC m=+4775.429864566" watchObservedRunningTime="2025-12-05 18:23:56.930551558 +0000 UTC m=+4775.433658584" Dec 05 18:23:59 crc kubenswrapper[4753]: I1205 18:23:59.720904 4753 scope.go:117] "RemoveContainer" 
containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:23:59 crc kubenswrapper[4753]: E1205 18:23:59.722003 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:24:01 crc kubenswrapper[4753]: I1205 18:24:01.167499 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-mt28j_2406b66f-c017-41be-b22e-0a1b748b2fff/cert-manager-controller/0.log" Dec 05 18:24:01 crc kubenswrapper[4753]: I1205 18:24:01.382319 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-nqxn4_07264007-14c8-49d7-b4f7-ee34bad54bca/cert-manager-cainjector/0.log" Dec 05 18:24:01 crc kubenswrapper[4753]: I1205 18:24:01.417012 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-scp9f_62c975a0-bab2-45d8-9b51-d4bbdf2a5ea6/cert-manager-webhook/0.log" Dec 05 18:24:02 crc kubenswrapper[4753]: I1205 18:24:02.527904 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2q567" Dec 05 18:24:02 crc kubenswrapper[4753]: I1205 18:24:02.530339 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2q567" Dec 05 18:24:02 crc kubenswrapper[4753]: I1205 18:24:02.597766 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2q567" Dec 05 18:24:03 crc kubenswrapper[4753]: I1205 18:24:03.044116 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2q567" Dec 05 18:24:03 crc kubenswrapper[4753]: I1205 18:24:03.104245 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2q567"] Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.005773 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2q567" podUID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerName="registry-server" containerID="cri-o://3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023" gracePeriod=2 Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.621686 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2q567" Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.722336 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8bhz\" (UniqueName: \"kubernetes.io/projected/f123c5ee-338e-4af3-b1c7-6907b0dd7966-kube-api-access-x8bhz\") pod \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.722566 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-utilities\") pod \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.722655 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-catalog-content\") pod \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\" (UID: \"f123c5ee-338e-4af3-b1c7-6907b0dd7966\") " Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.723805 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-utilities" (OuterVolumeSpecName: "utilities") pod "f123c5ee-338e-4af3-b1c7-6907b0dd7966" (UID: "f123c5ee-338e-4af3-b1c7-6907b0dd7966"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.728486 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f123c5ee-338e-4af3-b1c7-6907b0dd7966-kube-api-access-x8bhz" (OuterVolumeSpecName: "kube-api-access-x8bhz") pod "f123c5ee-338e-4af3-b1c7-6907b0dd7966" (UID: "f123c5ee-338e-4af3-b1c7-6907b0dd7966"). InnerVolumeSpecName "kube-api-access-x8bhz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.788934 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f123c5ee-338e-4af3-b1c7-6907b0dd7966" (UID: "f123c5ee-338e-4af3-b1c7-6907b0dd7966"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.824776 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.824806 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8bhz\" (UniqueName: \"kubernetes.io/projected/f123c5ee-338e-4af3-b1c7-6907b0dd7966-kube-api-access-x8bhz\") on node \"crc\" DevicePath \"\"" Dec 05 18:24:05 crc kubenswrapper[4753]: I1205 18:24:05.824818 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f123c5ee-338e-4af3-b1c7-6907b0dd7966-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.019530 4753 generic.go:334] "Generic (PLEG): container finished" podID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerID="3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023" exitCode=0 Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.019596 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2q567" event={"ID":"f123c5ee-338e-4af3-b1c7-6907b0dd7966","Type":"ContainerDied","Data":"3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023"} Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.019636 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2q567" event={"ID":"f123c5ee-338e-4af3-b1c7-6907b0dd7966","Type":"ContainerDied","Data":"25d46ccb38392bc730a3607d4ba51daa70397f9677f69ec6c719cb2c2af73f03"} Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.019647 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2q567" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.019657 4753 scope.go:117] "RemoveContainer" containerID="3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.039191 4753 scope.go:117] "RemoveContainer" containerID="e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.066749 4753 scope.go:117] "RemoveContainer" containerID="94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.078211 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2q567"] Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.094317 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2q567"] Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.118833 4753 scope.go:117] "RemoveContainer" containerID="3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023" Dec 05 18:24:06 crc kubenswrapper[4753]: E1205 18:24:06.119314 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023\": container with ID starting with 3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023 not found: ID does not exist" containerID="3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.119414 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023"} err="failed to get container status \"3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023\": rpc error: code = NotFound desc = could not find container \"3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023\": container with ID starting with 3e7e909d32e0a02da5e75377925fbf7ee78eff33a7e417b69b38b45596f08023 not found: ID does not exist" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.119492 4753 scope.go:117] "RemoveContainer" containerID="e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80" Dec 05 18:24:06 crc kubenswrapper[4753]: E1205 18:24:06.119785 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80\": container with ID starting with e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80 not found: ID does not exist" containerID="e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.119862 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80"} err="failed to get container status \"e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80\": rpc error: code = NotFound desc = could not find container \"e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80\": container with ID starting with e18c38c950286a05432e05a937bd9208bb4622f6a30dfd4db88197d4d544cf80 not found: ID does not exist" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.119923 4753 scope.go:117] "RemoveContainer" 
containerID="94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584" Dec 05 18:24:06 crc kubenswrapper[4753]: E1205 18:24:06.120163 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584\": container with ID starting with 94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584 not found: ID does not exist" containerID="94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584" Dec 05 18:24:06 crc kubenswrapper[4753]: I1205 18:24:06.120234 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584"} err="failed to get container status \"94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584\": rpc error: code = NotFound desc = could not find container \"94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584\": container with ID starting with 94f5bfb713405d50afe1d63a7c97be084cd8943dedd934d73be5113c657ca584 not found: ID does not exist" Dec 05 18:24:07 crc kubenswrapper[4753]: I1205 18:24:07.733701 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" path="/var/lib/kubelet/pods/f123c5ee-338e-4af3-b1c7-6907b0dd7966/volumes" Dec 05 18:24:13 crc kubenswrapper[4753]: I1205 18:24:13.720820 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:24:13 crc kubenswrapper[4753]: E1205 18:24:13.721687 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:24:16 crc kubenswrapper[4753]: I1205 18:24:16.607289 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-hdwrw_eccebd56-3231-40e1-b2f8-3b02547ff479/nmstate-console-plugin/0.log" Dec 05 18:24:16 crc kubenswrapper[4753]: I1205 18:24:16.759621 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-nq5l2_1d89bd55-dfdc-4dc5-94c7-36e3b21d95b3/nmstate-handler/0.log" Dec 05 18:24:16 crc kubenswrapper[4753]: I1205 18:24:16.796274 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ql66l_3f053a7e-ea2d-4b0e-b0fb-928c0038c436/kube-rbac-proxy/0.log" Dec 05 18:24:16 crc kubenswrapper[4753]: I1205 18:24:16.834711 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-ql66l_3f053a7e-ea2d-4b0e-b0fb-928c0038c436/nmstate-metrics/0.log" Dec 05 18:24:16 crc kubenswrapper[4753]: I1205 18:24:16.987559 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-5fxjp_37867715-d9ff-40d5-9c97-b99fd63be4b9/nmstate-operator/0.log" Dec 05 18:24:17 crc kubenswrapper[4753]: I1205 18:24:17.040719 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-mkz6b_dcfb16ac-e2ad-4b15-a3c7-d2c35e950739/nmstate-webhook/0.log" Dec 05 18:24:28 crc kubenswrapper[4753]: I1205 18:24:28.720349 
4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:24:28 crc kubenswrapper[4753]: E1205 18:24:28.721008 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:24:32 crc kubenswrapper[4753]: I1205 18:24:32.314751 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69686586d4-rttvr_2d74ebb5-4059-4fcd-beef-f9e7bd2731d4/manager/0.log" Dec 05 18:24:32 crc kubenswrapper[4753]: I1205 18:24:32.319060 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69686586d4-rttvr_2d74ebb5-4059-4fcd-beef-f9e7bd2731d4/kube-rbac-proxy/0.log" Dec 05 18:24:39 crc kubenswrapper[4753]: I1205 18:24:39.721285 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:24:39 crc kubenswrapper[4753]: E1205 18:24:39.722251 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:24:47 crc kubenswrapper[4753]: I1205 18:24:47.301670 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-q7lzx_23e6e527-684e-4e11-8470-b4149bb4c6cc/kube-rbac-proxy/0.log" Dec 05 18:24:47 crc kubenswrapper[4753]: I1205 18:24:47.487558 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-q7lzx_23e6e527-684e-4e11-8470-b4149bb4c6cc/controller/0.log" Dec 05 18:24:47 crc kubenswrapper[4753]: I1205 18:24:47.572001 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-frr-files/0.log" Dec 05 18:24:47 crc kubenswrapper[4753]: I1205 18:24:47.691439 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-reloader/0.log" Dec 05 18:24:47 crc kubenswrapper[4753]: I1205 18:24:47.698008 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-frr-files/0.log" Dec 05 18:24:47 crc kubenswrapper[4753]: I1205 18:24:47.789320 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-metrics/0.log" Dec 05 18:24:47 crc kubenswrapper[4753]: I1205 18:24:47.789375 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-reloader/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.057079 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-frr-files/0.log" Dec 05 18:24:48 crc 
kubenswrapper[4753]: I1205 18:24:48.067631 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-reloader/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.102263 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-metrics/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.113594 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-metrics/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.274996 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-reloader/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.333709 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/controller/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.336399 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-frr-files/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.338666 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/cp-metrics/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.502207 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/frr-metrics/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.542057 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/kube-rbac-proxy-frr/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.559685 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/kube-rbac-proxy/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.780282 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/reloader/0.log" Dec 05 18:24:48 crc kubenswrapper[4753]: I1205 18:24:48.804797 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-hn9ht_42fa5f2a-00be-462f-b4f2-35e8b89e8a5e/frr-k8s-webhook-server/0.log" Dec 05 18:24:49 crc kubenswrapper[4753]: I1205 18:24:49.125387 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-79f6547568-z26rz_44de2355-c97c-4421-87b6-1e7301bf430b/manager/0.log" Dec 05 18:24:49 crc kubenswrapper[4753]: I1205 18:24:49.313049 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7c4d66c645-ptcch_15c925fa-97bb-4d10-b85d-b451adac7306/webhook-server/0.log" Dec 05 18:24:49 crc kubenswrapper[4753]: I1205 18:24:49.313413 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-jls6g_08625193-514e-494a-b64a-75f345cf14bc/kube-rbac-proxy/0.log" Dec 05 18:24:49 crc kubenswrapper[4753]: I1205 18:24:49.959014 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-jls6g_08625193-514e-494a-b64a-75f345cf14bc/speaker/0.log" Dec 05 18:24:50 crc kubenswrapper[4753]: I1205 18:24:50.045323 4753 
log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-2gprd_bade1527-dbee-4843-b00d-0a41e7c516d9/frr/0.log" Dec 05 18:24:53 crc kubenswrapper[4753]: I1205 18:24:53.721348 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:24:53 crc kubenswrapper[4753]: E1205 18:24:53.722282 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:25:04 crc kubenswrapper[4753]: I1205 18:25:04.721518 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:25:04 crc kubenswrapper[4753]: E1205 18:25:04.722478 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:25:05 crc kubenswrapper[4753]: I1205 18:25:05.823037 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/util/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.316979 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/pull/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.335894 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/util/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.377410 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/pull/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.549053 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/extract/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.553815 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/util/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.580778 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212f876gj_cd2c3a45-8e4a-47ec-9552-9aeafd8bb61d/pull/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.757211 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/util/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.930349 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/util/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.995439 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/pull/0.log" Dec 05 18:25:06 crc kubenswrapper[4753]: I1205 18:25:06.998851 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/pull/0.log" Dec 05 18:25:07 crc kubenswrapper[4753]: I1205 18:25:07.174426 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/pull/0.log" Dec 05 18:25:07 crc kubenswrapper[4753]: I1205 18:25:07.177353 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/extract/0.log" Dec 05 18:25:07 crc kubenswrapper[4753]: I1205 18:25:07.210182 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83bhvd4_83d761d5-40da-46c2-b378-cd1cde770ccd/util/0.log" Dec 05 18:25:07 crc kubenswrapper[4753]: I1205 18:25:07.345907 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-utilities/0.log" Dec 05 18:25:07 crc kubenswrapper[4753]: I1205 18:25:07.533932 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-utilities/0.log" Dec 05 18:25:07 crc kubenswrapper[4753]: I1205 18:25:07.540124 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-content/0.log" Dec 05 18:25:07 crc kubenswrapper[4753]: I1205 18:25:07.564497 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-content/0.log" Dec 05 18:25:07 crc kubenswrapper[4753]: I1205 18:25:07.707609 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-utilities/0.log" Dec 05 18:25:07 crc kubenswrapper[4753]: I1205 18:25:07.749747 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/extract-content/0.log" Dec 05 18:25:08 crc kubenswrapper[4753]: I1205 18:25:08.039171 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-utilities/0.log" Dec 05 18:25:08 crc kubenswrapper[4753]: I1205 18:25:08.285171 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-djnfb_93a67264-59fa-4e32-868c-d4d308a5fed2/registry-server/0.log" Dec 05 18:25:08 crc kubenswrapper[4753]: I1205 18:25:08.290621 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-utilities/0.log" Dec 05 18:25:08 crc kubenswrapper[4753]: I1205 18:25:08.306237 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-content/0.log" Dec 05 18:25:08 crc kubenswrapper[4753]: I1205 18:25:08.313675 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-content/0.log" Dec 05 18:25:08 crc kubenswrapper[4753]: I1205 18:25:08.464971 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-utilities/0.log" Dec 05 18:25:08 crc kubenswrapper[4753]: I1205 18:25:08.492905 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/extract-content/0.log" Dec 05 18:25:08 crc kubenswrapper[4753]: I1205 18:25:08.719780 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kr92j_303b3266-3775-4fcb-aac9-432b1fefaedc/marketplace-operator/0.log" Dec 05 18:25:08 crc kubenswrapper[4753]: I1205 18:25:08.921612 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-utilities/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.177221 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-dsvw6_40a8284b-f439-41a8-a064-9582c9d50ec4/registry-server/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.207412 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-utilities/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.259840 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-content/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.284279 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-content/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.503221 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-content/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.511124 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/extract-utilities/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.669681 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6ndfl_b29e1fdb-cc3c-412d-9194-8f40a860b5f2/registry-server/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.673110 4753 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-utilities/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.864288 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-content/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.899996 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-content/0.log" Dec 05 18:25:09 crc kubenswrapper[4753]: I1205 18:25:09.902418 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-utilities/0.log" Dec 05 18:25:10 crc kubenswrapper[4753]: I1205 18:25:10.090687 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-utilities/0.log" Dec 05 18:25:10 crc kubenswrapper[4753]: I1205 18:25:10.105200 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/extract-content/0.log" Dec 05 18:25:10 crc kubenswrapper[4753]: I1205 18:25:10.458904 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-bm2xh_21af1eb9-08c2-4c3c-a3b3-e02577bd18a2/registry-server/0.log" Dec 05 18:25:15 crc kubenswrapper[4753]: I1205 18:25:15.721177 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:25:15 crc kubenswrapper[4753]: E1205 18:25:15.721924 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:25:24 crc kubenswrapper[4753]: I1205 18:25:24.255963 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-668cf9dfbb-f4kz7_3a0c0fe6-2fda-4398-8f9b-4ba0b6b52182/prometheus-operator/0.log" Dec 05 18:25:24 crc kubenswrapper[4753]: I1205 18:25:24.358329 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-57647d658-ds52w_96a23d8c-f2af-4e5d-afa5-4734f81f73ef/prometheus-operator-admission-webhook/0.log" Dec 05 18:25:24 crc kubenswrapper[4753]: I1205 18:25:24.453948 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-57647d658-tsxw8_b1a19a2b-30b2-47bd-a4e3-cb23e37e16cf/prometheus-operator-admission-webhook/0.log" Dec 05 18:25:24 crc kubenswrapper[4753]: I1205 18:25:24.565738 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-d8bb48f5d-x8ww2_6b271246-3310-483c-a548-db788331725d/operator/0.log" Dec 05 18:25:24 crc kubenswrapper[4753]: I1205 18:25:24.656670 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5446b9c989-66rht_a1cc3eef-b65a-496f-9cf1-7567825fce78/perses-operator/0.log" Dec 05 18:25:26 crc kubenswrapper[4753]: I1205 
18:25:26.721671 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:25:26 crc kubenswrapper[4753]: E1205 18:25:26.722448 4753 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-khn68_openshift-machine-config-operator(3fc2db1a-9f5e-4f36-b713-1a385f3a2d68)\"" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" Dec 05 18:25:37 crc kubenswrapper[4753]: I1205 18:25:37.720748 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:25:37 crc kubenswrapper[4753]: I1205 18:25:37.944691 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"703e56c5800061dd9ae2980dc51adb5fc042aab3223556efd2197f0e8183ae47"} Dec 05 18:25:40 crc kubenswrapper[4753]: I1205 18:25:40.288420 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69686586d4-rttvr_2d74ebb5-4059-4fcd-beef-f9e7bd2731d4/kube-rbac-proxy/0.log" Dec 05 18:25:40 crc kubenswrapper[4753]: I1205 18:25:40.350597 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators-redhat_loki-operator-controller-manager-69686586d4-rttvr_2d74ebb5-4059-4fcd-beef-f9e7bd2731d4/manager/0.log" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.709488 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7sqwj"] Dec 05 18:26:09 crc kubenswrapper[4753]: E1205 18:26:09.710746 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerName="extract-utilities" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.710763 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerName="extract-utilities" Dec 05 18:26:09 crc kubenswrapper[4753]: E1205 18:26:09.710785 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerName="registry-server" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.710791 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerName="registry-server" Dec 05 18:26:09 crc kubenswrapper[4753]: E1205 18:26:09.710824 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerName="extract-content" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.710833 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerName="extract-content" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.711065 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="f123c5ee-338e-4af3-b1c7-6907b0dd7966" containerName="registry-server" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.712852 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.751172 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7sqwj"] Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.879343 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-utilities\") pod \"redhat-operators-7sqwj\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.879445 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hjn6\" (UniqueName: \"kubernetes.io/projected/4dcb652d-381d-49fd-8486-20a269fbe3ac-kube-api-access-8hjn6\") pod \"redhat-operators-7sqwj\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.879527 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-catalog-content\") pod \"redhat-operators-7sqwj\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.983204 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-catalog-content\") pod \"redhat-operators-7sqwj\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.983442 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-utilities\") pod \"redhat-operators-7sqwj\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.983557 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hjn6\" (UniqueName: \"kubernetes.io/projected/4dcb652d-381d-49fd-8486-20a269fbe3ac-kube-api-access-8hjn6\") pod \"redhat-operators-7sqwj\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.984077 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-catalog-content\") pod \"redhat-operators-7sqwj\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:09 crc kubenswrapper[4753]: I1205 18:26:09.984528 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-utilities\") pod \"redhat-operators-7sqwj\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:10 crc kubenswrapper[4753]: I1205 18:26:10.013248 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8hjn6\" (UniqueName: \"kubernetes.io/projected/4dcb652d-381d-49fd-8486-20a269fbe3ac-kube-api-access-8hjn6\") pod \"redhat-operators-7sqwj\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:10 crc kubenswrapper[4753]: I1205 18:26:10.052202 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:10 crc kubenswrapper[4753]: I1205 18:26:10.559444 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7sqwj"] Dec 05 18:26:11 crc kubenswrapper[4753]: I1205 18:26:11.327378 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7sqwj" event={"ID":"4dcb652d-381d-49fd-8486-20a269fbe3ac","Type":"ContainerDied","Data":"58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42"} Dec 05 18:26:11 crc kubenswrapper[4753]: I1205 18:26:11.327404 4753 generic.go:334] "Generic (PLEG): container finished" podID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerID="58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42" exitCode=0 Dec 05 18:26:11 crc kubenswrapper[4753]: I1205 18:26:11.327481 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7sqwj" event={"ID":"4dcb652d-381d-49fd-8486-20a269fbe3ac","Type":"ContainerStarted","Data":"fbd1bf8ba3d874bed50872447f54da1c4d769fe33402d800da635e48abf9a34e"} Dec 05 18:26:12 crc kubenswrapper[4753]: I1205 18:26:12.338347 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7sqwj" event={"ID":"4dcb652d-381d-49fd-8486-20a269fbe3ac","Type":"ContainerStarted","Data":"b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987"} Dec 05 18:26:14 crc kubenswrapper[4753]: I1205 18:26:14.362117 4753 generic.go:334] "Generic (PLEG): container finished" podID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerID="b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987" exitCode=0 Dec 05 18:26:14 crc kubenswrapper[4753]: I1205 18:26:14.362188 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7sqwj" event={"ID":"4dcb652d-381d-49fd-8486-20a269fbe3ac","Type":"ContainerDied","Data":"b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987"} Dec 05 18:26:15 crc kubenswrapper[4753]: I1205 18:26:15.375239 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7sqwj" event={"ID":"4dcb652d-381d-49fd-8486-20a269fbe3ac","Type":"ContainerStarted","Data":"9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4"} Dec 05 18:26:15 crc kubenswrapper[4753]: I1205 18:26:15.399270 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7sqwj" podStartSLOduration=3.012565527 podStartE2EDuration="6.398949747s" podCreationTimestamp="2025-12-05 18:26:09 +0000 UTC" firstStartedPulling="2025-12-05 18:26:11.331138199 +0000 UTC m=+4909.834245245" lastFinishedPulling="2025-12-05 18:26:14.717522469 +0000 UTC m=+4913.220629465" observedRunningTime="2025-12-05 18:26:15.396107316 +0000 UTC m=+4913.899214362" watchObservedRunningTime="2025-12-05 18:26:15.398949747 +0000 UTC m=+4913.902056763" Dec 05 18:26:20 crc kubenswrapper[4753]: I1205 18:26:20.053187 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7sqwj" 
Dec 05 18:26:20 crc kubenswrapper[4753]: I1205 18:26:20.053709 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:21 crc kubenswrapper[4753]: I1205 18:26:21.134714 4753 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7sqwj" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerName="registry-server" probeResult="failure" output=< Dec 05 18:26:21 crc kubenswrapper[4753]: timeout: failed to connect service ":50051" within 1s Dec 05 18:26:21 crc kubenswrapper[4753]: > Dec 05 18:26:30 crc kubenswrapper[4753]: I1205 18:26:30.150097 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:30 crc kubenswrapper[4753]: I1205 18:26:30.223098 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:30 crc kubenswrapper[4753]: I1205 18:26:30.400913 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7sqwj"] Dec 05 18:26:31 crc kubenswrapper[4753]: I1205 18:26:31.563678 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7sqwj" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerName="registry-server" containerID="cri-o://9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4" gracePeriod=2 Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.253889 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.358034 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-catalog-content\") pod \"4dcb652d-381d-49fd-8486-20a269fbe3ac\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.358071 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-utilities\") pod \"4dcb652d-381d-49fd-8486-20a269fbe3ac\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.358257 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hjn6\" (UniqueName: \"kubernetes.io/projected/4dcb652d-381d-49fd-8486-20a269fbe3ac-kube-api-access-8hjn6\") pod \"4dcb652d-381d-49fd-8486-20a269fbe3ac\" (UID: \"4dcb652d-381d-49fd-8486-20a269fbe3ac\") " Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.359654 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-utilities" (OuterVolumeSpecName: "utilities") pod "4dcb652d-381d-49fd-8486-20a269fbe3ac" (UID: "4dcb652d-381d-49fd-8486-20a269fbe3ac"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.377911 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dcb652d-381d-49fd-8486-20a269fbe3ac-kube-api-access-8hjn6" (OuterVolumeSpecName: "kube-api-access-8hjn6") pod "4dcb652d-381d-49fd-8486-20a269fbe3ac" (UID: "4dcb652d-381d-49fd-8486-20a269fbe3ac"). InnerVolumeSpecName "kube-api-access-8hjn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.461000 4753 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.461034 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hjn6\" (UniqueName: \"kubernetes.io/projected/4dcb652d-381d-49fd-8486-20a269fbe3ac-kube-api-access-8hjn6\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.491608 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4dcb652d-381d-49fd-8486-20a269fbe3ac" (UID: "4dcb652d-381d-49fd-8486-20a269fbe3ac"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.562830 4753 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dcb652d-381d-49fd-8486-20a269fbe3ac-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.607163 4753 generic.go:334] "Generic (PLEG): container finished" podID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerID="9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4" exitCode=0 Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.607213 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7sqwj" event={"ID":"4dcb652d-381d-49fd-8486-20a269fbe3ac","Type":"ContainerDied","Data":"9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4"} Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.607245 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7sqwj" event={"ID":"4dcb652d-381d-49fd-8486-20a269fbe3ac","Type":"ContainerDied","Data":"fbd1bf8ba3d874bed50872447f54da1c4d769fe33402d800da635e48abf9a34e"} Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.607264 4753 scope.go:117] "RemoveContainer" containerID="9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.607490 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7sqwj" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.651761 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7sqwj"] Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.652660 4753 scope.go:117] "RemoveContainer" containerID="b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.665383 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7sqwj"] Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.676548 4753 scope.go:117] "RemoveContainer" containerID="58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.729893 4753 scope.go:117] "RemoveContainer" containerID="9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4" Dec 05 18:26:32 crc kubenswrapper[4753]: E1205 18:26:32.730444 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4\": container with ID starting with 9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4 not found: ID does not exist" containerID="9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.730494 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4"} err="failed to get container status \"9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4\": rpc error: code = NotFound desc = could not find container \"9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4\": container with ID starting with 9808b8bdc31288e4a7c913a09f353358303c8c679fdd75c10422195013487fe4 not found: ID does not exist" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.730521 4753 scope.go:117] "RemoveContainer" containerID="b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987" Dec 05 18:26:32 crc kubenswrapper[4753]: E1205 18:26:32.730789 4753 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987\": container with ID starting with b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987 not found: ID does not exist" containerID="b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.730852 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987"} err="failed to get container status \"b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987\": rpc error: code = NotFound desc = could not find container \"b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987\": container with ID starting with b70c0d47d4d3de2f10a99fee5d23bbaf17cc025afd41adfd377d311c331dc987 not found: ID does not exist" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.730892 4753 scope.go:117] "RemoveContainer" containerID="58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42" Dec 05 18:26:32 crc kubenswrapper[4753]: E1205 18:26:32.731272 4753 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42\": container with ID starting with 58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42 not found: ID does not exist" containerID="58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42" Dec 05 18:26:32 crc kubenswrapper[4753]: I1205 18:26:32.731314 4753 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42"} err="failed to get container status \"58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42\": rpc error: code = NotFound desc = could not find container \"58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42\": container with ID starting with 58d1d5343f85efffc4e7aaf8b840e3ffae385f343ce552d6f7c3be2cfd6f9e42 not found: ID does not exist" Dec 05 18:26:33 crc kubenswrapper[4753]: I1205 18:26:33.737908 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" path="/var/lib/kubelet/pods/4dcb652d-381d-49fd-8486-20a269fbe3ac/volumes" Dec 05 18:27:28 crc kubenswrapper[4753]: I1205 18:27:28.282223 4753 generic.go:334] "Generic (PLEG): container finished" podID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" containerID="533987c37c07456c2aa4069d5d86bb217315589689154061f52195f8a8027f9b" exitCode=0 Dec 05 18:27:28 crc kubenswrapper[4753]: I1205 18:27:28.282334 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-grnvs/must-gather-8f7zr" event={"ID":"d8ff9c27-b92d-4c30-ba62-164ea425f02f","Type":"ContainerDied","Data":"533987c37c07456c2aa4069d5d86bb217315589689154061f52195f8a8027f9b"} Dec 05 18:27:28 crc kubenswrapper[4753]: I1205 18:27:28.283427 4753 scope.go:117] "RemoveContainer" containerID="533987c37c07456c2aa4069d5d86bb217315589689154061f52195f8a8027f9b" Dec 05 18:27:28 crc kubenswrapper[4753]: I1205 18:27:28.681867 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-grnvs_must-gather-8f7zr_d8ff9c27-b92d-4c30-ba62-164ea425f02f/gather/0.log" Dec 05 18:27:33 crc kubenswrapper[4753]: I1205 18:27:33.983917 4753 scope.go:117] "RemoveContainer" containerID="6c0045965cc4f076fbcfa7ddfe60111930c88ffe7d7d6b0d4f96b43bc99a67a2" Dec 05 18:27:38 crc kubenswrapper[4753]: I1205 18:27:38.927472 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-grnvs/must-gather-8f7zr"] Dec 05 18:27:38 crc kubenswrapper[4753]: I1205 18:27:38.928141 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-grnvs/must-gather-8f7zr" podUID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" containerName="copy" containerID="cri-o://82e0327c3ecd7f3ab00ca469c1b49259727e3f729b79edd39e34080c8fe3333a" gracePeriod=2 Dec 05 18:27:38 crc kubenswrapper[4753]: I1205 18:27:38.931876 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-grnvs/must-gather-8f7zr"] Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.408050 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-grnvs_must-gather-8f7zr_d8ff9c27-b92d-4c30-ba62-164ea425f02f/copy/0.log" Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.409248 4753 generic.go:334] "Generic (PLEG): container finished" podID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" containerID="82e0327c3ecd7f3ab00ca469c1b49259727e3f729b79edd39e34080c8fe3333a" exitCode=143 Dec 05 18:27:39 crc 
kubenswrapper[4753]: I1205 18:27:39.409322 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="533c83d87818d0968441a974adbaca5da50240cefbce57c402d76be7c3c764b3" Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.482422 4753 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-grnvs_must-gather-8f7zr_d8ff9c27-b92d-4c30-ba62-164ea425f02f/copy/0.log" Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.483265 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.613998 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bcdt4\" (UniqueName: \"kubernetes.io/projected/d8ff9c27-b92d-4c30-ba62-164ea425f02f-kube-api-access-bcdt4\") pod \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\" (UID: \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\") " Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.614133 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d8ff9c27-b92d-4c30-ba62-164ea425f02f-must-gather-output\") pod \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\" (UID: \"d8ff9c27-b92d-4c30-ba62-164ea425f02f\") " Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.620966 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8ff9c27-b92d-4c30-ba62-164ea425f02f-kube-api-access-bcdt4" (OuterVolumeSpecName: "kube-api-access-bcdt4") pod "d8ff9c27-b92d-4c30-ba62-164ea425f02f" (UID: "d8ff9c27-b92d-4c30-ba62-164ea425f02f"). InnerVolumeSpecName "kube-api-access-bcdt4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.717553 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bcdt4\" (UniqueName: \"kubernetes.io/projected/d8ff9c27-b92d-4c30-ba62-164ea425f02f-kube-api-access-bcdt4\") on node \"crc\" DevicePath \"\"" Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.794865 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8ff9c27-b92d-4c30-ba62-164ea425f02f-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "d8ff9c27-b92d-4c30-ba62-164ea425f02f" (UID: "d8ff9c27-b92d-4c30-ba62-164ea425f02f"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:27:39 crc kubenswrapper[4753]: I1205 18:27:39.819457 4753 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/d8ff9c27-b92d-4c30-ba62-164ea425f02f-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 18:27:40 crc kubenswrapper[4753]: I1205 18:27:40.423287 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-grnvs/must-gather-8f7zr" Dec 05 18:27:41 crc kubenswrapper[4753]: I1205 18:27:41.730953 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" path="/var/lib/kubelet/pods/d8ff9c27-b92d-4c30-ba62-164ea425f02f/volumes" Dec 05 18:27:58 crc kubenswrapper[4753]: I1205 18:27:58.978791 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:27:58 crc kubenswrapper[4753]: I1205 18:27:58.979435 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:28:28 crc kubenswrapper[4753]: I1205 18:28:28.979896 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:28:28 crc kubenswrapper[4753]: I1205 18:28:28.980534 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:28:34 crc kubenswrapper[4753]: I1205 18:28:34.066272 4753 scope.go:117] "RemoveContainer" containerID="82e0327c3ecd7f3ab00ca469c1b49259727e3f729b79edd39e34080c8fe3333a" Dec 05 18:28:34 crc kubenswrapper[4753]: I1205 18:28:34.106270 4753 scope.go:117] "RemoveContainer" containerID="533987c37c07456c2aa4069d5d86bb217315589689154061f52195f8a8027f9b" Dec 05 18:28:58 crc kubenswrapper[4753]: I1205 18:28:58.978702 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:28:58 crc kubenswrapper[4753]: I1205 18:28:58.979397 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:28:58 crc kubenswrapper[4753]: I1205 18:28:58.979460 4753 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-khn68" Dec 05 18:28:58 crc kubenswrapper[4753]: I1205 18:28:58.980576 4753 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"703e56c5800061dd9ae2980dc51adb5fc042aab3223556efd2197f0e8183ae47"} pod="openshift-machine-config-operator/machine-config-daemon-khn68" containerMessage="Container machine-config-daemon failed liveness probe, will 
be restarted" Dec 05 18:28:58 crc kubenswrapper[4753]: I1205 18:28:58.980672 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" containerID="cri-o://703e56c5800061dd9ae2980dc51adb5fc042aab3223556efd2197f0e8183ae47" gracePeriod=600 Dec 05 18:28:59 crc kubenswrapper[4753]: I1205 18:28:59.293206 4753 generic.go:334] "Generic (PLEG): container finished" podID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerID="703e56c5800061dd9ae2980dc51adb5fc042aab3223556efd2197f0e8183ae47" exitCode=0 Dec 05 18:28:59 crc kubenswrapper[4753]: I1205 18:28:59.293322 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerDied","Data":"703e56c5800061dd9ae2980dc51adb5fc042aab3223556efd2197f0e8183ae47"} Dec 05 18:28:59 crc kubenswrapper[4753]: I1205 18:28:59.293529 4753 scope.go:117] "RemoveContainer" containerID="cc0f1cc17aa951c21ac505d180d094261b7dca04b8a1e989424ff5d5fd93fc78" Dec 05 18:29:00 crc kubenswrapper[4753]: I1205 18:29:00.307137 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-khn68" event={"ID":"3fc2db1a-9f5e-4f36-b713-1a385f3a2d68","Type":"ContainerStarted","Data":"f39c7b89147f6ac6564480117c08e200893ccbfbfdfebc8b383cb3714f6eb7ed"} Dec 05 18:29:34 crc kubenswrapper[4753]: I1205 18:29:34.253269 4753 scope.go:117] "RemoveContainer" containerID="2781a2e6d684b481d79393592fe10e131f7e2b1711118afe5519225351df8ec4" Dec 05 18:29:34 crc kubenswrapper[4753]: I1205 18:29:34.582083 4753 scope.go:117] "RemoveContainer" containerID="c9112a544289a4006df824305ba8f04a295d445c83710039afde5a5ad600c075" Dec 05 18:29:34 crc kubenswrapper[4753]: I1205 18:29:34.643738 4753 scope.go:117] "RemoveContainer" containerID="8674dba6742181e4c8a8f69d60351149802eceacd9f1e4e1024a194e819d91e0" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.160453 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6"] Dec 05 18:30:00 crc kubenswrapper[4753]: E1205 18:30:00.161472 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerName="registry-server" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.161487 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerName="registry-server" Dec 05 18:30:00 crc kubenswrapper[4753]: E1205 18:30:00.161512 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" containerName="copy" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.161520 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" containerName="copy" Dec 05 18:30:00 crc kubenswrapper[4753]: E1205 18:30:00.161550 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerName="extract-content" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.161559 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerName="extract-content" Dec 05 18:30:00 crc kubenswrapper[4753]: E1205 18:30:00.161575 4753 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" containerName="gather" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.161583 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" containerName="gather" Dec 05 18:30:00 crc kubenswrapper[4753]: E1205 18:30:00.161601 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerName="extract-utilities" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.161609 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerName="extract-utilities" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.161846 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dcb652d-381d-49fd-8486-20a269fbe3ac" containerName="registry-server" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.161862 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" containerName="gather" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.161885 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8ff9c27-b92d-4c30-ba62-164ea425f02f" containerName="copy" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.163000 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.165933 4753 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.166360 4753 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.171804 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6"] Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.214181 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-config-volume\") pod \"collect-profiles-29415990-wd7l6\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.214266 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5vkb\" (UniqueName: \"kubernetes.io/projected/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-kube-api-access-g5vkb\") pod \"collect-profiles-29415990-wd7l6\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.214415 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-secret-volume\") pod \"collect-profiles-29415990-wd7l6\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.317521 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" 
(UniqueName: \"kubernetes.io/secret/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-secret-volume\") pod \"collect-profiles-29415990-wd7l6\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.317741 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-config-volume\") pod \"collect-profiles-29415990-wd7l6\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.317821 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5vkb\" (UniqueName: \"kubernetes.io/projected/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-kube-api-access-g5vkb\") pod \"collect-profiles-29415990-wd7l6\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.318855 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-config-volume\") pod \"collect-profiles-29415990-wd7l6\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.334005 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-secret-volume\") pod \"collect-profiles-29415990-wd7l6\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.340053 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5vkb\" (UniqueName: \"kubernetes.io/projected/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-kube-api-access-g5vkb\") pod \"collect-profiles-29415990-wd7l6\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.494403 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:00 crc kubenswrapper[4753]: I1205 18:30:00.958734 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6"] Dec 05 18:30:00 crc kubenswrapper[4753]: W1205 18:30:00.965323 4753 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7d9f4e00_2277_4ab5_a870_f95be67f7a4d.slice/crio-420cee003419f66f63aa9d1ce38a250f15abe2abe149eb7ad248e3b0b0a81f68 WatchSource:0}: Error finding container 420cee003419f66f63aa9d1ce38a250f15abe2abe149eb7ad248e3b0b0a81f68: Status 404 returned error can't find the container with id 420cee003419f66f63aa9d1ce38a250f15abe2abe149eb7ad248e3b0b0a81f68 Dec 05 18:30:01 crc kubenswrapper[4753]: I1205 18:30:01.976425 4753 generic.go:334] "Generic (PLEG): container finished" podID="7d9f4e00-2277-4ab5-a870-f95be67f7a4d" containerID="a86c6f78ca7b2dc06a3ecc7bc2d4407a957547196bfd69822144ced9bd89c1b6" exitCode=0 Dec 05 18:30:01 crc kubenswrapper[4753]: I1205 18:30:01.976516 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" event={"ID":"7d9f4e00-2277-4ab5-a870-f95be67f7a4d","Type":"ContainerDied","Data":"a86c6f78ca7b2dc06a3ecc7bc2d4407a957547196bfd69822144ced9bd89c1b6"} Dec 05 18:30:01 crc kubenswrapper[4753]: I1205 18:30:01.976720 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" event={"ID":"7d9f4e00-2277-4ab5-a870-f95be67f7a4d","Type":"ContainerStarted","Data":"420cee003419f66f63aa9d1ce38a250f15abe2abe149eb7ad248e3b0b0a81f68"} Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.487968 4753 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.594814 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-config-volume\") pod \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.595072 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-secret-volume\") pod \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.595177 4753 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5vkb\" (UniqueName: \"kubernetes.io/projected/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-kube-api-access-g5vkb\") pod \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\" (UID: \"7d9f4e00-2277-4ab5-a870-f95be67f7a4d\") " Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.596128 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-config-volume" (OuterVolumeSpecName: "config-volume") pod "7d9f4e00-2277-4ab5-a870-f95be67f7a4d" (UID: "7d9f4e00-2277-4ab5-a870-f95be67f7a4d"). InnerVolumeSpecName "config-volume". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.604809 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-kube-api-access-g5vkb" (OuterVolumeSpecName: "kube-api-access-g5vkb") pod "7d9f4e00-2277-4ab5-a870-f95be67f7a4d" (UID: "7d9f4e00-2277-4ab5-a870-f95be67f7a4d"). InnerVolumeSpecName "kube-api-access-g5vkb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.615364 4753 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "7d9f4e00-2277-4ab5-a870-f95be67f7a4d" (UID: "7d9f4e00-2277-4ab5-a870-f95be67f7a4d"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.698141 4753 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.698210 4753 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.698230 4753 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5vkb\" (UniqueName: \"kubernetes.io/projected/7d9f4e00-2277-4ab5-a870-f95be67f7a4d-kube-api-access-g5vkb\") on node \"crc\" DevicePath \"\"" Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.998948 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" event={"ID":"7d9f4e00-2277-4ab5-a870-f95be67f7a4d","Type":"ContainerDied","Data":"420cee003419f66f63aa9d1ce38a250f15abe2abe149eb7ad248e3b0b0a81f68"} Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.999009 4753 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="420cee003419f66f63aa9d1ce38a250f15abe2abe149eb7ad248e3b0b0a81f68" Dec 05 18:30:03 crc kubenswrapper[4753]: I1205 18:30:03.999010 4753 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-wd7l6" Dec 05 18:30:04 crc kubenswrapper[4753]: I1205 18:30:04.561073 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"] Dec 05 18:30:04 crc kubenswrapper[4753]: I1205 18:30:04.569531 4753 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-22sp8"] Dec 05 18:30:05 crc kubenswrapper[4753]: I1205 18:30:05.759081 4753 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55491de8-76f7-4830-b11e-f0c2e18845ae" path="/var/lib/kubelet/pods/55491de8-76f7-4830-b11e-f0c2e18845ae/volumes" Dec 05 18:30:34 crc kubenswrapper[4753]: I1205 18:30:34.696788 4753 scope.go:117] "RemoveContainer" containerID="3d0f6beb0e80532da3ddaaa2c7457e5fd4e4970b034c9f63fa7a42403fbef956" Dec 05 18:31:28 crc kubenswrapper[4753]: I1205 18:31:28.979408 4753 patch_prober.go:28] interesting pod/machine-config-daemon-khn68 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:31:28 crc kubenswrapper[4753]: I1205 18:31:28.980038 4753 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-khn68" podUID="3fc2db1a-9f5e-4f36-b713-1a385f3a2d68" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:31:28 crc kubenswrapper[4753]: I1205 18:31:28.990766 4753 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kl7f6"] Dec 05 18:31:28 crc kubenswrapper[4753]: E1205 18:31:28.991538 4753 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d9f4e00-2277-4ab5-a870-f95be67f7a4d" containerName="collect-profiles" Dec 05 18:31:28 crc kubenswrapper[4753]: I1205 18:31:28.991574 4753 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d9f4e00-2277-4ab5-a870-f95be67f7a4d" containerName="collect-profiles" Dec 05 18:31:28 crc kubenswrapper[4753]: I1205 18:31:28.991992 4753 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d9f4e00-2277-4ab5-a870-f95be67f7a4d" containerName="collect-profiles" Dec 05 18:31:28 crc kubenswrapper[4753]: I1205 18:31:28.995498 4753 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.024342 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kl7f6"]
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.075998 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g76v8\" (UniqueName: \"kubernetes.io/projected/2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d-kube-api-access-g76v8\") pod \"redhat-marketplace-kl7f6\" (UID: \"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d\") " pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.076101 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d-utilities\") pod \"redhat-marketplace-kl7f6\" (UID: \"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d\") " pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.076130 4753 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d-catalog-content\") pod \"redhat-marketplace-kl7f6\" (UID: \"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d\") " pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.179063 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d-utilities\") pod \"redhat-marketplace-kl7f6\" (UID: \"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d\") " pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.179373 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d-catalog-content\") pod \"redhat-marketplace-kl7f6\" (UID: \"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d\") " pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.179645 4753 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g76v8\" (UniqueName: \"kubernetes.io/projected/2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d-kube-api-access-g76v8\") pod \"redhat-marketplace-kl7f6\" (UID: \"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d\") " pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.180079 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d-catalog-content\") pod \"redhat-marketplace-kl7f6\" (UID: \"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d\") " pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.180106 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d-utilities\") pod \"redhat-marketplace-kl7f6\" (UID: \"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d\") " pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.201449 4753 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g76v8\" (UniqueName: \"kubernetes.io/projected/2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d-kube-api-access-g76v8\") pod \"redhat-marketplace-kl7f6\" (UID: \"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d\") " pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.325713 4753 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:29 crc kubenswrapper[4753]: I1205 18:31:29.872403 4753 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kl7f6"]
Dec 05 18:31:30 crc kubenswrapper[4753]: I1205 18:31:30.025648 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kl7f6" event={"ID":"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d","Type":"ContainerStarted","Data":"b5ce64c3580754c07bc1e80161e5e85cf23ef0f9807d94f84aacd5c280ec8f4a"}
Dec 05 18:31:31 crc kubenswrapper[4753]: I1205 18:31:31.038660 4753 generic.go:334] "Generic (PLEG): container finished" podID="2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d" containerID="a1e2d0a959bce42b713e781756143eb6b7b9dc7289961555ce3ca2b11b326c95" exitCode=0
Dec 05 18:31:31 crc kubenswrapper[4753]: I1205 18:31:31.038739 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kl7f6" event={"ID":"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d","Type":"ContainerDied","Data":"a1e2d0a959bce42b713e781756143eb6b7b9dc7289961555ce3ca2b11b326c95"}
Dec 05 18:31:31 crc kubenswrapper[4753]: I1205 18:31:31.041942 4753 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 18:31:32 crc kubenswrapper[4753]: I1205 18:31:32.052464 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kl7f6" event={"ID":"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d","Type":"ContainerStarted","Data":"f115e83c263e3b57f8a0eb6eedf7ce948cb57548656efacf840911477ccb62f8"}
Dec 05 18:31:33 crc kubenswrapper[4753]: I1205 18:31:33.067200 4753 generic.go:334] "Generic (PLEG): container finished" podID="2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d" containerID="f115e83c263e3b57f8a0eb6eedf7ce948cb57548656efacf840911477ccb62f8" exitCode=0
Dec 05 18:31:33 crc kubenswrapper[4753]: I1205 18:31:33.067500 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kl7f6" event={"ID":"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d","Type":"ContainerDied","Data":"f115e83c263e3b57f8a0eb6eedf7ce948cb57548656efacf840911477ccb62f8"}
Dec 05 18:31:34 crc kubenswrapper[4753]: I1205 18:31:34.080911 4753 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kl7f6" event={"ID":"2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d","Type":"ContainerStarted","Data":"656afb1b914a003f1c2caf107bab4f0687116011e2d9e5cd4a30bdc814203f00"}
Dec 05 18:31:34 crc kubenswrapper[4753]: I1205 18:31:34.113024 4753 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kl7f6" podStartSLOduration=3.676091229 podStartE2EDuration="6.112995439s" podCreationTimestamp="2025-12-05 18:31:28 +0000 UTC" firstStartedPulling="2025-12-05 18:31:31.041622245 +0000 UTC m=+5229.544729261" lastFinishedPulling="2025-12-05 18:31:33.478526455 +0000 UTC m=+5231.981633471" observedRunningTime="2025-12-05 18:31:34.095093712 +0000 UTC m=+5232.598200718" watchObservedRunningTime="2025-12-05 18:31:34.112995439 +0000 UTC m=+5232.616102445"
Dec 05 18:31:39 crc kubenswrapper[4753]: I1205 18:31:39.326329 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:39 crc kubenswrapper[4753]: I1205 18:31:39.326853 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:39 crc kubenswrapper[4753]: I1205 18:31:39.406934 4753 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:40 crc kubenswrapper[4753]: I1205 18:31:40.210438 4753 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kl7f6"
Dec 05 18:31:40 crc kubenswrapper[4753]: I1205 18:31:40.276993 4753 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kl7f6"]
Dec 05 18:31:42 crc kubenswrapper[4753]: I1205 18:31:42.173837 4753 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kl7f6" podUID="2dd7e4b1-2abd-482a-bbd2-2d597eb4ae2d" containerName="registry-server" containerID="cri-o://656afb1b914a003f1c2caf107bab4f0687116011e2d9e5cd4a30bdc814203f00" gracePeriod=2